Mirror of https://github.com/jojo61/vdr-plugin-softhdcuvid.git
Synced 2023-10-10 13:37:41 +02:00

Commit 1bf5a841e9: Merge branch 'master' into Switch-to-posix-compaatible-sched_yield
.indent.pro: new file (vendored), 37 lines added

@@ -0,0 +1,37 @@
+--blank-lines-before-block-comments
+--blank-lines-after-declarations
+--blank-lines-after-procedures
+--no-blank-lines-after-commas
+--braces-on-if-line
+--no-blank-before-sizeof
+--comment-indentation41
+--declaration-comment-column41
+--no-comment-delimiters-on-blank-lines
+--swallow-optional-blank-lines
+--dont-format-comments
+--parameter-indentation4
+--indent-level4
+--line-comments-indentation0
+--cuddle-else
+--cuddle-do-while
+--brace-indent0
+--case-brace-indentation0
+//--start-left-side-of-comments
+--leave-preprocessor-space
+//--continuation-indentation8
+--case-indentation4
+--else-endif-column0
+--no-space-after-casts
+--declaration-indentation1
+--dont-line-up-parentheses
+--no-space-after-function-call-names
+--space-special-semicolon
+--tab-size4
+--no-tabs
+--line-length119
+--comment-line-length119
+--honour-newlines
+--dont-break-procedure-type
+--break-before-boolean-operator
+--continuation-indentation4
+--ignore-newlines
Makefile: 2 changes

@@ -348,8 +348,6 @@ HDRS= $(wildcard *.h)
 indent:
 	for i in $(SRCS) $(HDRS); do \
 		indent $$i; \
-		unexpand -a $$i | sed -e s/constconst/const/ > $$i.up; \
-		mv $$i.up $$i; \
 	done

 video_test: video.c Makefile
audio.c: 391 changes

@@ -109,15 +109,15 @@ typedef struct _audio_module_
 {
     const char *Name;                   ///< audio output module name

-    int (*const Thread) (void);         ///< module thread handler
-    void (*const FlushBuffers) (void);  ///< flush sample buffers
+    int (*const Thread)(void);          ///< module thread handler
+    void (*const FlushBuffers)(void);   ///< flush sample buffers
     int64_t(*const GetDelay) (void);    ///< get current audio delay
-    void (*const SetVolume) (int);      ///< set output volume
-    int (*const Setup) (int *, int *, int); ///< setup channels, samplerate
-    void (*const Play) (void);          ///< play audio
-    void (*const Pause) (void);         ///< pause audio
-    void (*const Init) (void);          ///< initialize audio output module
-    void (*const Exit) (void);          ///< cleanup audio output module
+    void (*const SetVolume)(int);       ///< set output volume
+    int (*const Setup)(int *, int *, int);  ///< setup channels, samplerate
+    void (*const Play)(void);           ///< play audio
+    void (*const Pause)(void);          ///< pause audio
+    void (*const Init)(void);           ///< initialize audio output module
+    void (*const Exit)(void);           ///< cleanup audio output module
 } AudioModule;

 static const AudioModule NoopModule;    ///< forward definition of noop module

@@ -260,8 +260,7 @@ static void AudioNormalizer(int16_t * samples, int count)
         if (avg > 0) {
             factor = ((INT16_MAX / 8) * 1000U) / (uint32_t) sqrt(avg);
             // smooth normalize
-            AudioNormalizeFactor =
-                (AudioNormalizeFactor * 500 + factor * 500) / 1000;
+            AudioNormalizeFactor = (AudioNormalizeFactor * 500 + factor * 500) / 1000;
             if (AudioNormalizeFactor < AudioMinNormalize) {
                 AudioNormalizeFactor = AudioMinNormalize;
             }
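The assignment joined in the hunk above is a fixed-point smoothing step: the previous gain and the newly measured gain are blended in per-mille units (500/500 here for the normalizer, 950/50 for the compressor in a later hunk), so 1000 stands for a gain of 1.0. A minimal standalone sketch of the same arithmetic follows; the function name and the test values are hypothetical and only illustrate the formula, they are not part of the plugin:

    #include <stdio.h>

    // Gains are per-mille fixed point: 1000 == 1.0.
    static int SmoothGain(int state, int factor, int keep_promille)
    {
        // integer weighted average, no floating point needed
        return (state * keep_promille + factor * (1000 - keep_promille)) / 1000;
    }

    int main(void)
    {
        printf("%d\n", SmoothGain(1000, 2000, 500));    // normalizer-style blend -> 1500
        printf("%d\n", SmoothGain(1000, 2000, 950));    // compressor-style blend -> 1050
        return 0;
    }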
@@ -271,8 +270,8 @@ static void AudioNormalizer(int16_t * samples, int count)
         } else {
             factor = 1000;
         }
-        Debug(4, "audio/noramlize: avg %8d, fac=%6.3f, norm=%6.3f\n",
-            avg, factor / 1000.0, AudioNormalizeFactor / 1000.0);
+        Debug(4, "audio/noramlize: avg %8d, fac=%6.3f, norm=%6.3f\n", avg, factor / 1000.0,
+            AudioNormalizeFactor / 1000.0);
     }

     AudioNormIndex = (AudioNormIndex + 1) % AudioNormMaxIndex;

@@ -339,8 +338,7 @@ static void AudioCompressor(int16_t * samples, int count)
     if (max_sample > 0) {
         factor = (INT16_MAX * 1000) / max_sample;
         // smooth compression (FIXME: make configurable?)
-        AudioCompressionFactor =
-            (AudioCompressionFactor * 950 + factor * 50) / 1000;
+        AudioCompressionFactor = (AudioCompressionFactor * 950 + factor * 50) / 1000;
         if (AudioCompressionFactor > factor) {
             AudioCompressionFactor = factor;    // no clipping
         }

@@ -351,8 +349,8 @@ static void AudioCompressor(int16_t * samples, int count)
         return;                         // silent nothing todo
     }

-    Debug(4, "audio/compress: max %5d, fac=%6.3f, com=%6.3f\n", max_sample,
-        factor / 1000.0, AudioCompressionFactor / 1000.0);
+    Debug(4, "audio/compress: max %5d, fac=%6.3f, com=%6.3f\n", max_sample, factor / 1000.0,
+        AudioCompressionFactor / 1000.0);

     // apply compression factor
     for (i = 0; i < count / AudioBytesProSample; ++i) {

@@ -460,8 +458,7 @@ static void AudioStereo2Mono(const int16_t * in, int frames, int16_t * out)
 ** @param frames    number of frames in sample buffer
 ** @param out       output sample buffer
 */
-static void AudioSurround2Stereo(const int16_t * in, int in_chan, int frames,
-    int16_t * out)
+static void AudioSurround2Stereo(const int16_t * in, int in_chan, int frames, int16_t * out)
 {
     while (frames--) {
         int l;

@@ -540,8 +537,7 @@ static void AudioSurround2Stereo(const int16_t * in, int in_chan, int frames,
 ** @param out       output sample buffer
 ** @param out_chan  nr. of output channels
 */
-static void AudioUpmix(const int16_t * in, int in_chan, int frames,
-    int16_t * out, int out_chan)
+static void AudioUpmix(const int16_t * in, int in_chan, int frames, int16_t * out, int out_chan)
 {
     while (frames--) {
         int i;

@@ -571,8 +567,7 @@ static void AudioUpmix(const int16_t * in, int in_chan, int frames,
 ** @param out       output sample buffer
 ** @param out_chan  nr. of output channels
 */
-static void AudioResample(const int16_t * in, int in_chan, int frames,
-    int16_t * out, int out_chan)
+static void AudioResample(const int16_t * in, int in_chan, int frames, int16_t * out, int out_chan)
 {
     switch (in_chan * 8 + out_chan) {
         case 1 * 8 + 1:

@@ -607,8 +602,7 @@ static void AudioResample(const int16_t * in, int in_chan, int frames,
             break;

         default:
-            Error("audio: unsupported %d -> %d channels resample\n", in_chan,
-                out_chan);
+            Error("audio: unsupported %d -> %d channels resample\n", in_chan, out_chan);
             // play silence
             memset(out, 0, frames * out_chan * AudioBytesProSample);
             break;

@@ -697,8 +691,7 @@ static int AudioRingAdd(unsigned sample_rate, int channels, int passthrough)
     AudioRing[AudioRingWrite].PTS = INT64_C(0x8000000000000000);
     RingBufferReset(AudioRing[AudioRingWrite].RingBuffer);

-    Debug(3, "audio: %d ring buffer prepared\n",
-        atomic_read(&AudioRingFilled) + 1);
+    Debug(3, "audio: %d ring buffer prepared\n", atomic_read(&AudioRingFilled) + 1);

     atomic_inc(&AudioRingFilled);


@@ -707,7 +700,7 @@ static int AudioRingAdd(unsigned sample_rate, int channels, int passthrough)
         // tell thread, that there is something todo
         AudioRunning = 1;
         pthread_cond_signal(&AudioStartCond);
-        Debug(3,"Start on AudioRingAdd\n");
+        Debug(3, "Start on AudioRingAdd\n");
     }
 #endif

@@ -796,14 +789,12 @@ static int AlsaPlayRingbuffer(void)
             if (n == -EAGAIN) {
                 continue;
             }
-            Warning(_("audio/alsa: avail underrun error? '%s'\n"),
-                snd_strerror(n));
+            Warning(_("audio/alsa: avail underrun error? '%s'\n"), snd_strerror(n));
             err = snd_pcm_recover(AlsaPCMHandle, n, 0);
             if (err >= 0) {
                 continue;
             }
-            Error(_("audio/alsa: snd_pcm_avail_update(): %s\n"),
-                snd_strerror(n));
+            Error(_("audio/alsa: snd_pcm_avail_update(): %s\n"), snd_strerror(n));
             return -1;
         }
         avail = snd_pcm_frames_to_bytes(AlsaPCMHandle, n);

@@ -812,23 +803,20 @@ static int AlsaPlayRingbuffer(void)
                 // happens with broken alsa drivers
                 if (AudioThread) {
                     if (!AudioAlsaDriverBroken) {
-                        Error(_("audio/alsa: broken driver %d state '%s'\n"),
-                            avail,
+                        Error(_("audio/alsa: broken driver %d state '%s'\n"), avail,
                             snd_pcm_state_name(snd_pcm_state(AlsaPCMHandle)));
                     }
                     // try to recover
                     if (snd_pcm_state(AlsaPCMHandle)
                         == SND_PCM_STATE_PREPARED) {
                         if ((err = snd_pcm_start(AlsaPCMHandle)) < 0) {
-                            Error(_("audio/alsa: snd_pcm_start(): %s\n"),
-                                snd_strerror(err));
+                            Error(_("audio/alsa: snd_pcm_start(): %s\n"), snd_strerror(err));
                         }
                     }
                     usleep(5 * 1000);
                 }
             }
-            Debug(4, "audio/alsa: break state '%s'\n",
-                snd_pcm_state_name(snd_pcm_state(AlsaPCMHandle)));
+            Debug(4, "audio/alsa: break state '%s'\n", snd_pcm_state_name(snd_pcm_state(AlsaPCMHandle)));
             break;
         }


@@ -849,8 +837,7 @@ static int AlsaPlayRingbuffer(void)
             break;
         }
         // muting pass-through AC-3, can produce disturbance
-        if (AudioMute || (AudioSoftVolume
-            && !AudioRing[AudioRingRead].Passthrough)) {
+        if (AudioMute || (AudioSoftVolume && !AudioRing[AudioRingRead].Passthrough)) {
             // FIXME: quick&dirty cast
             AudioSoftAmplifier((int16_t *) p, avail);
             // FIXME: if not all are written, we double amplify them

@@ -879,14 +866,12 @@ static int AlsaPlayRingbuffer(void)
                 goto again;
             }
             */
-            Warning(_("audio/alsa: writei underrun error? '%s'\n"),
-                snd_strerror(err));
+            Warning(_("audio/alsa: writei underrun error? '%s'\n"), snd_strerror(err));
             err = snd_pcm_recover(AlsaPCMHandle, err, 0);
             if (err >= 0) {
                 continue;
             }
-            Error(_("audio/alsa: snd_pcm_writei failed: %s\n"),
-                snd_strerror(err));
+            Error(_("audio/alsa: snd_pcm_writei failed: %s\n"), snd_strerror(err));
             return -1;
         }
         // this could happen, if underrun happened

@@ -898,7 +883,6 @@ static int AlsaPlayRingbuffer(void)
         RingBufferReadAdvance(AudioRing[AudioRingRead].RingBuffer, avail);
         first = 0;

-
     }
     return 0;
 }

@@ -955,8 +939,7 @@ static int AlsaThread(void)
         }
         // wait for space in kernel buffers
         if ((err = snd_pcm_wait(AlsaPCMHandle, 24)) < 0) {
-            Warning(_("audio/alsa: wait underrun error? '%s'\n"),
-                snd_strerror(err));
+            Warning(_("audio/alsa: wait underrun error? '%s'\n"), snd_strerror(err));
             err = snd_pcm_recover(AlsaPCMHandle, err, 0);
             if (err >= 0) {
                 continue;

@@ -980,8 +963,7 @@ static int AlsaThread(void)

         state = snd_pcm_state(AlsaPCMHandle);
         if (state != SND_PCM_STATE_RUNNING) {
-            Debug(3, "audio/alsa: stopping play '%s'\n",
-                snd_pcm_state_name(state));
+            Debug(3, "audio/alsa: stopping play '%s'\n", snd_pcm_state_name(state));
             return 0;
         }


@@ -1035,8 +1017,7 @@ static snd_pcm_t *AlsaOpenPCM(int passthrough)
 #endif
     }
     // open none blocking; if device is already used, we don't want wait
-    if ((err =
-            snd_pcm_open(&handle, device, SND_PCM_STREAM_PLAYBACK,SND_PCM_NONBLOCK)) < 0) {
+    if ((err = snd_pcm_open(&handle, device, SND_PCM_STREAM_PLAYBACK, SND_PCM_NONBLOCK)) < 0) {
         Error(_("audio/alsa: playback open '%s' error: %s\n"), device, snd_strerror(err));
         return NULL;
     }

@@ -1065,8 +1046,7 @@ static void AlsaInitPCM(void)
     snd_pcm_hw_params_alloca(&hw_params);
     // choose all parameters
     if ((err = snd_pcm_hw_params_any(handle, hw_params)) < 0) {
-        Error(_("audio: snd_pcm_hw_params_any: no configurations available: %s\n"),
-            snd_strerror(err));
+        Error(_("audio: snd_pcm_hw_params_any: no configurations available: %s\n"), snd_strerror(err));
     }
     AlsaCanPause = snd_pcm_hw_params_can_pause(hw_params);
     Info(_("audio/alsa: supports pause: %s\n"), AlsaCanPause ? "yes" : "no");

@@ -1119,8 +1099,7 @@ static void AlsaInitMixer(void)
     Debug(3, "audio/alsa: mixer %s - %s open\n", device, channel);
     snd_mixer_open(&alsa_mixer, 0);
     if (alsa_mixer && snd_mixer_attach(alsa_mixer, device) >= 0
-        && snd_mixer_selem_register(alsa_mixer, NULL, NULL) >= 0
-        && snd_mixer_load(alsa_mixer) >= 0) {
+        && snd_mixer_selem_register(alsa_mixer, NULL, NULL) >= 0 && snd_mixer_load(alsa_mixer) >= 0) {

         const char *const alsa_mixer_elem_name = channel;


@@ -1130,11 +1109,10 @@ static void AlsaInitMixer(void)

             name = snd_mixer_selem_get_name(alsa_mixer_elem);
             if (!strcasecmp(name, alsa_mixer_elem_name)) {
-                snd_mixer_selem_get_playback_volume_range(alsa_mixer_elem,
-                    &alsa_mixer_elem_min, &alsa_mixer_elem_max);
+                snd_mixer_selem_get_playback_volume_range(alsa_mixer_elem, &alsa_mixer_elem_min, &alsa_mixer_elem_max);
                 AlsaRatio = 1000 * (alsa_mixer_elem_max - alsa_mixer_elem_min);
-                Debug(3, "audio/alsa: PCM mixer found %ld - %ld ratio %d\n",
-                    alsa_mixer_elem_min, alsa_mixer_elem_max, AlsaRatio);
+                Debug(3, "audio/alsa: PCM mixer found %ld - %ld ratio %d\n", alsa_mixer_elem_min, alsa_mixer_elem_max,
+                    AlsaRatio);
                 break;
             }

@@ -1236,16 +1214,14 @@ static int AlsaSetup(int *freq, int *channels, int passthrough)
     for (;;) {
         if ((err =
                 snd_pcm_set_params(AlsaPCMHandle, SND_PCM_FORMAT_S16,
-                    AlsaUseMmap ? SND_PCM_ACCESS_MMAP_INTERLEAVED :
-                    SND_PCM_ACCESS_RW_INTERLEAVED, *channels, *freq, 1,
+                    AlsaUseMmap ? SND_PCM_ACCESS_MMAP_INTERLEAVED : SND_PCM_ACCESS_RW_INTERLEAVED, *channels, *freq, 1,
                     96 * 1000))) {
             // try reduced buffer size (needed for sunxi)
             // FIXME: alternativ make this configurable
             if ((err =
                     snd_pcm_set_params(AlsaPCMHandle, SND_PCM_FORMAT_S16,
-                        AlsaUseMmap ? SND_PCM_ACCESS_MMAP_INTERLEAVED :
-                        SND_PCM_ACCESS_RW_INTERLEAVED, *channels, *freq, 1,
-                        72 * 1000))) {
+                        AlsaUseMmap ? SND_PCM_ACCESS_MMAP_INTERLEAVED : SND_PCM_ACCESS_RW_INTERLEAVED, *channels,
+                        *freq, 1, 72 * 1000))) {

 /*
 	if ( err == -EBADFD ) {

@@ -1256,8 +1232,7 @@ static int AlsaSetup(int *freq, int *channels, int passthrough)
 */

             if (!AudioDoingInit) {
-                Error(_("audio/alsa: set params error: %s\n"),
-                    snd_strerror(err));
+                Error(_("audio/alsa: set params error: %s\n"), snd_strerror(err));
             }
             // FIXME: must stop sound, AudioChannels ... invalid
             return -1;

@@ -1274,41 +1249,30 @@ static int AlsaSetup(int *freq, int *channels, int passthrough)
         snd_pcm_sw_params_alloca(&sw_params);
         err = snd_pcm_sw_params_current(AlsaPCMHandle, sw_params);
         if (err < 0) {
-            Error(_("audio: snd_pcm_sw_params_current failed: %s\n"),
-                snd_strerror(err));
+            Error(_("audio: snd_pcm_sw_params_current failed: %s\n"), snd_strerror(err));
         }
         if ((err = snd_pcm_sw_params_get_boundary(sw_params, &boundary)) < 0) {
-            Error(_("audio: snd_pcm_sw_params_get_boundary failed: %s\n"),
-                snd_strerror(err));
+            Error(_("audio: snd_pcm_sw_params_get_boundary failed: %s\n"), snd_strerror(err));
         }
         Debug(4, "audio/alsa: boundary %lu frames\n", boundary);
-        if ((err =
-                snd_pcm_sw_params_set_stop_threshold(AlsaPCMHandle, sw_params,
-                    boundary)) < 0) {
-            Error(_("audio: snd_pcm_sw_params_set_silence_size failed: %s\n"),
-                snd_strerror(err));
+        if ((err = snd_pcm_sw_params_set_stop_threshold(AlsaPCMHandle, sw_params, boundary)) < 0) {
+            Error(_("audio: snd_pcm_sw_params_set_silence_size failed: %s\n"), snd_strerror(err));
         }
-        if ((err =
-                snd_pcm_sw_params_set_silence_size(AlsaPCMHandle, sw_params,
-                    boundary)) < 0) {
-            Error(_("audio: snd_pcm_sw_params_set_silence_size failed: %s\n"),
-                snd_strerror(err));
+        if ((err = snd_pcm_sw_params_set_silence_size(AlsaPCMHandle, sw_params, boundary)) < 0) {
+            Error(_("audio: snd_pcm_sw_params_set_silence_size failed: %s\n"), snd_strerror(err));
         }
         if ((err = snd_pcm_sw_params(AlsaPCMHandle, sw_params)) < 0) {
-            Error(_("audio: snd_pcm_sw_params failed: %s\n"),
-                snd_strerror(err));
+            Error(_("audio: snd_pcm_sw_params failed: %s\n"), snd_strerror(err));
         }
     }
     // update buffer

     snd_pcm_get_params(AlsaPCMHandle, &buffer_size, &period_size);
-    Debug(3, "audio/alsa: buffer size %lu %zdms, period size %lu %zdms\n",
-        buffer_size, snd_pcm_frames_to_bytes(AlsaPCMHandle,
-            buffer_size) * 1000 / (*freq * *channels * AudioBytesProSample),
+    Debug(3, "audio/alsa: buffer size %lu %zdms, period size %lu %zdms\n", buffer_size,
+        snd_pcm_frames_to_bytes(AlsaPCMHandle, buffer_size) * 1000 / (*freq * *channels * AudioBytesProSample),
         period_size, snd_pcm_frames_to_bytes(AlsaPCMHandle,
             period_size) * 1000 / (*freq * *channels * AudioBytesProSample));
-    Debug(3, "audio/alsa: state %s\n",
-        snd_pcm_state_name(snd_pcm_state(AlsaPCMHandle)));
+    Debug(3, "audio/alsa: state %s\n", snd_pcm_state_name(snd_pcm_state(AlsaPCMHandle)));

     AudioStartThreshold = snd_pcm_frames_to_bytes(AlsaPCMHandle, period_size);
     // buffer time/delay in ms

@@ -1316,8 +1280,7 @@ static int AlsaSetup(int *freq, int *channels, int passthrough)
     if (VideoAudioDelay > 0) {
         delay += VideoAudioDelay / 90;
     }
-    if (AudioStartThreshold <
-        (*freq * *channels * AudioBytesProSample * delay) / 1000U) {
+    if (AudioStartThreshold < (*freq * *channels * AudioBytesProSample * delay) / 1000U) {
         AudioStartThreshold = (*freq * *channels * AudioBytesProSample * delay) / 1000U;
     }
     // no bigger, than 1/3 the buffer

@@ -1376,11 +1339,11 @@ static void AlsaPause(void)
 /**
 **	Empty log callback
 */
-static void AlsaNoopCallback( __attribute__ ((unused))
-    const char *file, __attribute__ ((unused))
-    int line, __attribute__ ((unused))
-    const char *function, __attribute__ ((unused))
-    int err, __attribute__ ((unused))
+static void AlsaNoopCallback( __attribute__((unused))
+    const char *file, __attribute__((unused))
+    int line, __attribute__((unused))
+    const char *function, __attribute__((unused))
+    int err, __attribute__((unused))
     const char *fmt, ...)
 {
 }
@@ -1474,8 +1437,7 @@ static int OssPlayRingbuffer(void)
         int n;

         if (ioctl(OssPcmFildes, SNDCTL_DSP_GETOSPACE, &bi) == -1) {
-            Error(_("audio/oss: ioctl(SNDCTL_DSP_GETOSPACE): %s\n"),
-                strerror(errno));
+            Error(_("audio/oss: ioctl(SNDCTL_DSP_GETOSPACE): %s\n"), strerror(errno));
             return -1;
         }
         Debug(4, "audio/oss: %d bytes free\n", bi.bytes);

@@ -1529,8 +1491,7 @@ static void OssFlushBuffers(void)
     if (OssPcmFildes != -1) {
         // flush kernel buffers
         if (ioctl(OssPcmFildes, SNDCTL_DSP_HALT_OUTPUT, NULL) < 0) {
-            Error(_("audio/oss: ioctl(SNDCTL_DSP_HALT_OUTPUT): %s\n"),
-                strerror(errno));
+            Error(_("audio/oss: ioctl(SNDCTL_DSP_HALT_OUTPUT): %s\n"), strerror(errno));
         }
     }
 }

@@ -1614,13 +1575,11 @@ static int OssOpenPCM(int passthrough)
         device = "/dev/dsp";
     }
     if (!AudioDoingInit) {
-        Info(_("audio/oss: using %sdevice '%s'\n"),
-            passthrough ? "pass-through " : "", device);
+        Info(_("audio/oss: using %sdevice '%s'\n"), passthrough ? "pass-through " : "", device);
     }

     if ((fildes = open(device, O_WRONLY)) < 0) {
-        Error(_("audio/oss: can't open dsp device '%s': %s\n"), device,
-            strerror(errno));
+        Error(_("audio/oss: can't open dsp device '%s': %s\n"), device, strerror(errno));
         return -1;
     }
     return fildes;

@@ -1666,8 +1625,7 @@ static void OssSetVolume(int volume)
 /**
 **	Mixer channel name table.
 */
-static const char *OssMixerChannelNames[SOUND_MIXER_NRDEVICES] =
-    SOUND_DEVICE_NAMES;
+static const char *OssMixerChannelNames[SOUND_MIXER_NRDEVICES] = SOUND_DEVICE_NAMES;

 /**
 **	Initialize OSS mixer.

@@ -1693,14 +1651,12 @@ static void OssInitMixer(void)
     Debug(3, "audio/oss: mixer %s - %s open\n", device, channel);

     if ((fildes = open(device, O_RDWR)) < 0) {
-        Error(_("audio/oss: can't open mixer device '%s': %s\n"), device,
-            strerror(errno));
+        Error(_("audio/oss: can't open mixer device '%s': %s\n"), device, strerror(errno));
         return;
     }
     // search channel name
     if (ioctl(fildes, SOUND_MIXER_READ_DEVMASK, &devmask) < 0) {
-        Error(_("audio/oss: ioctl(SOUND_MIXER_READ_DEVMASK): %s\n"),
-            strerror(errno));
+        Error(_("audio/oss: ioctl(SOUND_MIXER_READ_DEVMASK): %s\n"), strerror(errno));
         close(fildes);
         return;
     }

@@ -1744,8 +1700,7 @@ static int64_t OssGetDelay(void)
     // delay in bytes in kernel buffers
     delay = -1;
     if (ioctl(OssPcmFildes, SNDCTL_DSP_GETODELAY, &delay) == -1) {
-        Error(_("audio/oss: ioctl(SNDCTL_DSP_GETODELAY): %s\n"),
-            strerror(errno));
+        Error(_("audio/oss: ioctl(SNDCTL_DSP_GETODELAY): %s\n"), strerror(errno));
         return 0L;
     }
     if (delay < 0) {

@@ -1753,8 +1708,7 @@ static int64_t OssGetDelay(void)
     }

     pts = ((int64_t) delay * 90 * 1000)
-        / (AudioRing[AudioRingRead].HwSampleRate *
-        AudioRing[AudioRingRead].HwChannels * AudioBytesProSample);
+        / (AudioRing[AudioRingRead].HwSampleRate * AudioRing[AudioRingRead].HwChannels * AudioBytesProSample);

     return pts;
 }

@@ -1810,13 +1764,11 @@ static int OssSetup(int *sample_rate, int *channels, int passthrough)

     tmp = *channels;
     if (ioctl(OssPcmFildes, SNDCTL_DSP_CHANNELS, &tmp) == -1) {
-        Error(_("audio/oss: ioctl(SNDCTL_DSP_CHANNELS): %s\n"),
-            strerror(errno));
+        Error(_("audio/oss: ioctl(SNDCTL_DSP_CHANNELS): %s\n"), strerror(errno));
         return -1;
     }
     if (tmp != *channels) {
-        Warning(_("audio/oss: device doesn't support %d channels.\n"),
-            *channels);
+        Warning(_("audio/oss: device doesn't support %d channels.\n"), *channels);
         *channels = tmp;
         ret = 1;
     }

@@ -1827,8 +1779,7 @@ static int OssSetup(int *sample_rate, int *channels, int passthrough)
         return -1;
     }
     if (tmp != *sample_rate) {
-        Warning(_("audio/oss: device doesn't support %dHz sample rate.\n"),
-            *sample_rate);
+        Warning(_("audio/oss: device doesn't support %dHz sample rate.\n"), *sample_rate);
         *sample_rate = tmp;
         ret = 1;
     }

@@ -1842,8 +1793,7 @@ static int OssSetup(int *sample_rate, int *channels, int passthrough)
 #endif

     if (ioctl(OssPcmFildes, SNDCTL_DSP_GETOSPACE, &bi) == -1) {
-        Error(_("audio/oss: ioctl(SNDCTL_DSP_GETOSPACE): %s\n"),
-            strerror(errno));
+        Error(_("audio/oss: ioctl(SNDCTL_DSP_GETOSPACE): %s\n"), strerror(errno));
         bi.fragsize = 4096;
         bi.fragstotal = 16;
     } else {

@@ -1853,10 +1803,9 @@ static int OssSetup(int *sample_rate, int *channels, int passthrough)
         OssFragmentTime = (bi.fragsize * 1000)
             / (*sample_rate * *channels * AudioBytesProSample);

-        Debug(3, "audio/oss: buffer size %d %dms, fragment size %d %dms\n",
-            bi.fragsize * bi.fragstotal, (bi.fragsize * bi.fragstotal * 1000)
-            / (*sample_rate * *channels * AudioBytesProSample), bi.fragsize,
-            OssFragmentTime);
+        Debug(3, "audio/oss: buffer size %d %dms, fragment size %d %dms\n", bi.fragsize * bi.fragstotal,
+            (bi.fragsize * bi.fragstotal * 1000)
+            / (*sample_rate * *channels * AudioBytesProSample), bi.fragsize, OssFragmentTime);

         // start when enough bytes for initial write
         AudioStartThreshold = (bi.fragsize - 1) * bi.fragstotal;

@@ -1866,10 +1815,8 @@ static int OssSetup(int *sample_rate, int *channels, int passthrough)
     if (VideoAudioDelay > 0) {
         delay += VideoAudioDelay / 90;
     }
-    if (AudioStartThreshold <
-        (*sample_rate * *channels * AudioBytesProSample * delay) / 1000U) {
-        AudioStartThreshold =
-            (*sample_rate * *channels * AudioBytesProSample * delay) / 1000U;
+    if (AudioStartThreshold < (*sample_rate * *channels * AudioBytesProSample * delay) / 1000U) {
+        AudioStartThreshold = (*sample_rate * *channels * AudioBytesProSample * delay) / 1000U;
     }
     // no bigger, than 1/3 the buffer
     if (AudioStartThreshold > AudioRingBufferSize / 3) {
@@ -1961,7 +1908,7 @@ static int64_t NoopGetDelay(void)
 **
 **	@param volume	volume (0 .. 1000)
 */
-static void NoopSetVolume( __attribute__ ((unused))
+static void NoopSetVolume( __attribute__((unused))
     int volume)
 {
 }

@@ -1973,9 +1920,9 @@ static void NoopSetVolume( __attribute__ ((unused))
 **	@param channels	number of channels
 **	@param passthrough	use pass-through (AC-3, ...) device
 */
-static int NoopSetup( __attribute__ ((unused))
-    int *channels, __attribute__ ((unused))
-    int *freq, __attribute__ ((unused))
+static int NoopSetup( __attribute__((unused))
+    int *channels, __attribute__((unused))
+    int *freq, __attribute__((unused))
     int passthrough)
 {
     return -1;

@@ -2025,8 +1972,7 @@ static int AudioNextRing(void)
     sample_rate = AudioRing[AudioRingRead].HwSampleRate;
     channels = AudioRing[AudioRingRead].HwChannels;
     if (AudioUsedModule->Setup(&sample_rate, &channels, passthrough)) {
-        Error(_("audio: can't set channels %d sample-rate %dHz\n"), channels,
-            sample_rate);
+        Error(_("audio: can't set channels %d sample-rate %dHz\n"), channels, sample_rate);
         // FIXME: handle error
         AudioRing[AudioRingRead].HwSampleRate = 0;
         AudioRing[AudioRingRead].InSampleRate = 0;

@@ -2039,13 +1985,11 @@ static int AudioNextRing(void)

     Debug(3, "audio: a/v next buf(%d,%4zdms)\n", atomic_read(&AudioRingFilled),
         (RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer) * 1000)
-        / (AudioRing[AudioRingWrite].HwSampleRate *
-        AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));
+        / (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));

     // stop, if not enough in next buffer
     used = RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer);
-    if (AudioStartThreshold * 10 < used || (AudioVideoIsReady
-        && AudioStartThreshold < used)) {
+    if (AudioStartThreshold * 10 < used || (AudioVideoIsReady && AudioStartThreshold < used)) {
         return 0;
     }
     return 1;

@@ -2059,7 +2003,7 @@ static int AudioNextRing(void)
 static void *AudioPlayHandlerThread(void *dummy)
 {
     Debug(3, "audio: play thread started\n");
-    prctl(PR_SET_NAME,"cuvid audio",0,0,0);
+    prctl(PR_SET_NAME, "cuvid audio", 0, 0, 0);
     for (;;) {
         // check if we should stop the thread
         if (AudioThreadStop) {

@@ -2077,10 +2021,8 @@ static void *AudioPlayHandlerThread(void *dummy)
         pthread_mutex_unlock(&AudioMutex);

         Debug(3, "audio: ----> %dms start\n", (AudioUsedBytes() * 1000)
-            / (!AudioRing[AudioRingWrite].HwSampleRate +
-            !AudioRing[AudioRingWrite].HwChannels +
-            AudioRing[AudioRingWrite].HwSampleRate *
-            AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));
+            / (!AudioRing[AudioRingWrite].HwSampleRate + !AudioRing[AudioRingWrite].HwChannels +
+            AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));

         do {
             int filled;

@@ -2135,7 +2077,7 @@ static void *AudioPlayHandlerThread(void *dummy)

             // underrun, and no new ring buffer, goto sleep.
             if (!atomic_read(&AudioRingFilled)) {
-                Debug(3,"audio: HandlerThread Underrun with no new data\n");
+                Debug(3, "audio: HandlerThread Underrun with no new data\n");
                 break;
             }


@@ -2150,15 +2092,13 @@ static void *AudioPlayHandlerThread(void *dummy)
             passthrough = AudioRing[AudioRingRead].Passthrough;
             sample_rate = AudioRing[AudioRingRead].HwSampleRate;
             channels = AudioRing[AudioRingRead].HwChannels;
-            Debug(3, "audio: thread channels %d frequency %dHz %s\n",
-                channels, sample_rate, passthrough ? "pass-through" : "");
+            Debug(3, "audio: thread channels %d frequency %dHz %s\n", channels, sample_rate,
+                passthrough ? "pass-through" : "");
             // audio config changed?
-            if (old_passthrough != passthrough
-                || old_sample_rate != sample_rate
-                || old_channels != channels) {
+            if (old_passthrough != passthrough || old_sample_rate != sample_rate || old_channels != channels) {
                 // FIXME: wait for buffer drain
                 if (AudioNextRing()) {
-                    Debug(3,"audio: HandlerThread break on nextring");
+                    Debug(3, "audio: HandlerThread break on nextring");
                     break;
                 }
             } else {

@@ -2168,7 +2108,7 @@ static void *AudioPlayHandlerThread(void *dummy)
             }
             // FIXME: check AudioPaused ...Thread()
             if (AudioPaused) {
-                Debug(3,"audio: HandlerThread break on paused");
+                Debug(3, "audio: HandlerThread break on paused");
                 break;
             }
         } while (AudioRing[AudioRingRead].HwSampleRate);
@@ -2228,19 +2168,22 @@ static const AudioModule *AudioModules[] = {
     &NoopModule,
 };

-void AudioDelayms(int delayms) {
+void AudioDelayms(int delayms)
+{
     int count;
     unsigned char *p;

 #ifdef DEBUG
-    printf("Try Delay Audio for %d ms Samplerate %d Channels %d bps %d\n",
-        delayms,AudioRing[AudioRingWrite].HwSampleRate,AudioRing[AudioRingWrite].HwChannels,AudioBytesProSample);
+    printf("Try Delay Audio for %d ms Samplerate %d Channels %d bps %d\n", delayms,
+        AudioRing[AudioRingWrite].HwSampleRate, AudioRing[AudioRingWrite].HwChannels, AudioBytesProSample);
 #endif

-    count = delayms * AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample / 1000;
+    count =
+        delayms * AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample /
+        1000;

     if (delayms < 5000 && delayms > 0) {    // not more than 5seconds
-        p = calloc(1,count);
+        p = calloc(1, count);
         RingBufferWrite(AudioRing[AudioRingWrite].RingBuffer, p, count);
         free(p);
     }
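The re-wrapped `count` expression in the hunk above turns a delay in milliseconds into a number of silence bytes: delayms x sample rate x channels x bytes per sample / 1000. A small standalone sketch of that arithmetic follows; the 48 kHz stereo 16-bit values are assumptions for illustration only and do not come from the diff:

    #include <stdio.h>

    int main(void)
    {
        // Assumed example values: 48 kHz, stereo, 16-bit samples.
        int delayms = 100;
        int sample_rate = 48000;
        int channels = 2;
        int bytes_per_sample = 2;       // plays the role of AudioBytesProSample

        int count = delayms * sample_rate * channels * bytes_per_sample / 1000;

        printf("%d bytes of silence for %d ms\n", count, delayms);  // 19200 bytes
        return 0;
    }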
@@ -2279,37 +2222,28 @@ void AudioEnqueue(const void *samples, int count)
     }
     // audio sample modification allowed and needed?
     buffer = (void *)samples;
-    if (!AudioRing[AudioRingWrite].Passthrough && (AudioCompression
-        || AudioNormalize
-        || AudioRing[AudioRingWrite].InChannels !=
-        AudioRing[AudioRingWrite].HwChannels)) {
+    if (!AudioRing[AudioRingWrite].Passthrough && (AudioCompression || AudioNormalize
+        || AudioRing[AudioRingWrite].InChannels != AudioRing[AudioRingWrite].HwChannels)) {
         int frames;

         // resample into ring-buffer is too complex in the case of a roundabout
         // just use a temporary buffer
-        frames =
-            count / (AudioRing[AudioRingWrite].InChannels *
-            AudioBytesProSample);
-        buffer =
-            alloca(frames * AudioRing[AudioRingWrite].HwChannels *
-            AudioBytesProSample);
+        frames = count / (AudioRing[AudioRingWrite].InChannels * AudioBytesProSample);
+        buffer = alloca(frames * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample);
 #ifdef USE_AUDIO_MIXER
         // Convert / resample input to hardware format
-        AudioResample(samples, AudioRing[AudioRingWrite].InChannels, frames,
-            buffer, AudioRing[AudioRingWrite].HwChannels);
+        AudioResample(samples, AudioRing[AudioRingWrite].InChannels, frames, buffer,
+            AudioRing[AudioRingWrite].HwChannels);
 #else
 #ifdef DEBUG
-        if (AudioRing[AudioRingWrite].InChannels !=
-            AudioRing[AudioRingWrite].HwChannels) {
+        if (AudioRing[AudioRingWrite].InChannels != AudioRing[AudioRingWrite].HwChannels) {
             Debug(3, "audio: internal failure channels mismatch\n");
             return;
         }
 #endif
         memcpy(buffer, samples, count);
 #endif
-        count =
-            frames * AudioRing[AudioRingWrite].HwChannels *
-            AudioBytesProSample;
+        count = frames * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample;

         if (AudioCompression) {         // in place operation
             AudioCompressor(buffer, count);

@@ -2320,7 +2254,7 @@ void AudioEnqueue(const void *samples, int count)
     }

     n = RingBufferWrite(AudioRing[AudioRingWrite].RingBuffer, buffer, count);
-    if (n != (size_t) count) {
+    if (n != (size_t)count) {
         Error(_("audio: can't place %d samples in ring buffer\n"), count);
         // too many bytes are lost
         // FIXME: caller checks buffer full.

@@ -2336,11 +2270,9 @@ void AudioEnqueue(const void *samples, int count)
         // FIXME: round to packet size

         Debug(4, "audio: start? %4zdms skip %dms\n", (n * 1000)
-            / (AudioRing[AudioRingWrite].HwSampleRate *
-            AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),
+            / (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),
             (skip * 1000)
-            / (AudioRing[AudioRingWrite].HwSampleRate *
-            AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));
+            / (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));

         if (skip) {
             if (n < (unsigned)skip) {

@@ -2359,14 +2291,13 @@ void AudioEnqueue(const void *samples, int count)
             // no lock needed, can wakeup next time
             AudioRunning = 1;
             pthread_cond_signal(&AudioStartCond);
-            Debug(3,"Start on AudioEnque\n");
+            Debug(3, "Start on AudioEnque\n");
         }
     }
     // Update audio clock (stupid gcc developers thinks INT64_C is unsigned)
     if (AudioRing[AudioRingWrite].PTS != (int64_t) INT64_C(0x8000000000000000)) {
         AudioRing[AudioRingWrite].PTS += ((int64_t) count * 90 * 1000)
-            / (AudioRing[AudioRingWrite].HwSampleRate *
-            AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample);
+            / (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample);
     }
 }


@@ -2385,10 +2316,8 @@ void AudioVideoReady(int64_t pts)
         return;
     }
     // no valid audio known
-    if (!AudioRing[AudioRingWrite].HwSampleRate
-        || !AudioRing[AudioRingWrite].HwChannels
-        || AudioRing[AudioRingWrite].PTS ==
-        (int64_t) INT64_C(0x8000000000000000)) {
+    if (!AudioRing[AudioRingWrite].HwSampleRate || !AudioRing[AudioRingWrite].HwChannels
+        || AudioRing[AudioRingWrite].PTS == (int64_t) INT64_C(0x8000000000000000)) {
         Debug(3, "audio: a/v start, no valid audio\n");
         AudioVideoIsReady = 1;
         return;

@@ -2398,26 +2327,22 @@ void AudioVideoReady(int64_t pts)
     used = RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer);
     audio_pts =
         AudioRing[AudioRingWrite].PTS -
-        (used * 90 * 1000) / (AudioRing[AudioRingWrite].HwSampleRate *
-        AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample);
+        (used * 90 * 1000) / (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels *
+        AudioBytesProSample);

-    Debug(3, "audio: a/v sync buf(%d,%4zdms) %s | %s = %dms %s\n",
-        atomic_read(&AudioRingFilled),
-        (used * 1000) / (AudioRing[AudioRingWrite].HwSampleRate *
-        AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),
-        Timestamp2String(pts), Timestamp2String(audio_pts),
-        (int)(pts - audio_pts) / 90, AudioRunning ? "running" : "ready");
+    Debug(3, "audio: a/v sync buf(%d,%4zdms) %s | %s = %dms %s\n", atomic_read(&AudioRingFilled),
+        (used * 1000) / (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels *
+        AudioBytesProSample), Timestamp2String(pts), Timestamp2String(audio_pts), (int)(pts - audio_pts) / 90,
+        AudioRunning ? "running" : "ready");

     if (!AudioRunning) {
         int skip;

         // buffer ~15 video frames
         // FIXME: HDTV can use smaller video buffer
-        skip =
-            pts - 15 * 20 * 90 - AudioBufferTime * 90 - audio_pts + VideoAudioDelay;
+        skip = pts - 15 * 20 * 90 - AudioBufferTime * 90 - audio_pts + VideoAudioDelay;
 #ifdef DEBUG
-        fprintf(stderr, "%dms %dms %dms\n", (int)(pts - audio_pts) / 90,
-            VideoAudioDelay / 90, skip / 90);
+        fprintf(stderr, "%dms %dms %dms\n", (int)(pts - audio_pts) / 90, VideoAudioDelay / 90, skip / 90);
 #endif
         // guard against old PTS
         if (skip > 0 && skip < 4000 * 90) {

@@ -2429,15 +2354,13 @@ void AudioVideoReady(int64_t pts)
                 skip = used;
             }
             Debug(3, "audio: sync advance %dms %d/%zd\n",
-                (skip * 1000) / (AudioRing[AudioRingWrite].HwSampleRate *
-                AudioRing[AudioRingWrite].HwChannels *
+                (skip * 1000) / (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels *
                 AudioBytesProSample), skip, used);
             RingBufferReadAdvance(AudioRing[AudioRingWrite].RingBuffer, skip);

             used = RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer);
-        }
-        else {
-            Debug(3,"No audio skip -> should skip %d\n",skip/90);
+        } else {
+            Debug(3, "No audio skip -> should skip %d\n", skip / 90);
         }
         // FIXME: skip<0 we need bigger audio buffer


@@ -2445,27 +2368,22 @@ void AudioVideoReady(int64_t pts)
         if (AudioStartThreshold < used) {
             AudioRunning = 1;
             pthread_cond_signal(&AudioStartCond);
-            Debug(3,"Start on AudioVideoReady\n");
+            Debug(3, "Start on AudioVideoReady\n");
         }
     }

     AudioVideoIsReady = 1;
 #if 0
-    if (AudioRing[AudioRingWrite].HwSampleRate
-        && AudioRing[AudioRingWrite].HwChannels) {
+    if (AudioRing[AudioRingWrite].HwSampleRate && AudioRing[AudioRingWrite].HwChannels) {
         if (pts != (int64_t) INT64_C(0x8000000000000000)
-            && AudioRing[AudioRingWrite].PTS !=
-            (int64_t) INT64_C(0x8000000000000000)) {
-            Debug(3, "audio: a/v %d %s\n",
-                (int)(pts - AudioRing[AudioRingWrite].PTS) / 90,
+            && AudioRing[AudioRingWrite].PTS != (int64_t) INT64_C(0x8000000000000000)) {
+            Debug(3, "audio: a/v %d %s\n", (int)(pts - AudioRing[AudioRingWrite].PTS) / 90,
                 AudioRunning ? "running" : "stopped");
         }
         Debug(3, "audio: start %4zdms %s|%s video ready\n",
             (RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer) * 1000)
-            / (AudioRing[AudioRingWrite].HwSampleRate *
-            AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),
-            Timestamp2String(pts),
-            Timestamp2String(AudioRing[AudioRingWrite].PTS));
+            / (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),
+            Timestamp2String(pts), Timestamp2String(AudioRing[AudioRingWrite].PTS));

         if (!AudioRunning) {
             size_t used;

@@ -2475,13 +2393,10 @@ void AudioVideoReady(int64_t pts)
             if (AudioStartThreshold < used) {
                 // too much audio buffered, skip it
                 if (AudioStartThreshold < used) {
-                    Debug(3, "audio: start %4zdms skip video ready\n",
-                        ((used - AudioStartThreshold) * 1000)
-                        / (AudioRing[AudioRingWrite].HwSampleRate *
-                        AudioRing[AudioRingWrite].HwChannels *
+                    Debug(3, "audio: start %4zdms skip video ready\n", ((used - AudioStartThreshold) * 1000)
+                        / (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels *
                         AudioBytesProSample));
-                    RingBufferReadAdvance(AudioRing[AudioRingWrite].RingBuffer,
-                        used - AudioStartThreshold);
+                    RingBufferReadAdvance(AudioRing[AudioRingWrite].RingBuffer, used - AudioStartThreshold);
                 }
                 AudioRunning = 1;
                 pthread_cond_signal(&AudioStartCond);

@@ -2538,7 +2453,7 @@ void AudioFlushBuffers(void)
     if (!AudioRunning) {                // wakeup thread to flush buffers
         AudioRunning = 1;
         pthread_cond_signal(&AudioStartCond);
-        Debug(3,"Start on Flush\n");
+        Debug(3, "Start on Flush\n");
     }
     // FIXME: waiting on zero isn't correct, but currently works
     if (!atomic_read(&AudioRingFilled)) {

@@ -2562,8 +2477,7 @@ void AudioPoller(void)
 */
 int AudioFreeBytes(void)
 {
-    return AudioRing[AudioRingWrite].RingBuffer ?
-        RingBufferFreeBytes(AudioRing[AudioRingWrite].RingBuffer)
+    return AudioRing[AudioRingWrite].RingBuffer ? RingBufferFreeBytes(AudioRing[AudioRingWrite].RingBuffer)
         : INT32_MAX;
 }


@@ -2573,8 +2487,7 @@ int AudioFreeBytes(void)
 int AudioUsedBytes(void)
 {
     // FIXME: not correct, if multiple buffer are in use
-    return AudioRing[AudioRingWrite].RingBuffer ?
-        RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer) : 0;
+    return AudioRing[AudioRingWrite].RingBuffer ? RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer) : 0;
 }

 /**
@@ -2597,10 +2510,10 @@ int64_t AudioGetDelay(void)
     }
     pts = AudioUsedModule->GetDelay();
     pts += ((int64_t) RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer)
-        * 90 * 1000) / (AudioRing[AudioRingRead].HwSampleRate *
-        AudioRing[AudioRingRead].HwChannels * AudioBytesProSample);
-    Debug(4, "audio: hw+sw delay %zd %" PRId64 "ms\n",
-        RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer), pts / 90);
+        * 90 * 1000) / (AudioRing[AudioRingRead].HwSampleRate * AudioRing[AudioRingRead].HwChannels *
+        AudioBytesProSample);
+    Debug(4, "audio: hw+sw delay %zd %" PRId64 "ms\n", RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer),
+        pts / 90);

     return pts;
 }
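The joined expression in the hunk above converts buffered bytes into 90 kHz PTS ticks: bytes x 90 x 1000 / (sample rate x channels x bytes per sample). A standalone sketch of that conversion follows; it is not the plugin's code, and the 48 kHz stereo 16-bit values are assumed purely for illustration:

    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        // Assumed example values: 48 kHz, stereo, 16-bit samples.
        int64_t buffered_bytes = 19200;
        int sample_rate = 48000;
        int channels = 2;
        int bytes_per_sample = 2;

        // bytes -> 90 kHz PTS ticks, same formula as the diff above
        int64_t pts = (buffered_bytes * 90 * 1000) / ((int64_t)sample_rate * channels * bytes_per_sample);

        printf("%lld ticks = %lld ms\n", (long long)pts, (long long)(pts / 90));    // 9000 ticks = 100 ms
        return 0;
    }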
@@ -2613,8 +2526,7 @@ int64_t AudioGetDelay(void)
 void AudioSetClock(int64_t pts)
 {
     if (AudioRing[AudioRingWrite].PTS != pts) {
-        Debug(4, "audio: set clock %s -> %s pts\n",
-            Timestamp2String(AudioRing[AudioRingWrite].PTS),
+        Debug(4, "audio: set clock %s -> %s pts\n", Timestamp2String(AudioRing[AudioRingWrite].PTS),
             Timestamp2String(pts));
     }
     AudioRing[AudioRingWrite].PTS = pts;

@@ -2652,8 +2564,7 @@ void AudioSetVolume(int volume)
     AudioVolume = volume;
     AudioMute = !volume;
     // reduce loudness for stereo output
-    if (AudioStereoDescent && AudioRing[AudioRingRead].InChannels == 2
-        && !AudioRing[AudioRingRead].Passthrough) {
+    if (AudioStereoDescent && AudioRing[AudioRingRead].InChannels == 2 && !AudioRing[AudioRingRead].Passthrough) {
         volume -= AudioStereoDescent;
         if (volume < 0) {
             volume = 0;

@@ -2682,8 +2593,7 @@ void AudioSetVolume(int volume)
 */
 int AudioSetup(int *freq, int *channels, int passthrough)
 {
-    Debug(3, "audio: setup channels %d frequency %dHz %s\n", *channels, *freq,
-        passthrough ? "pass-through" : "");
+    Debug(3, "audio: setup channels %d frequency %dHz %s\n", *channels, *freq, passthrough ? "pass-through" : "");

     // invalid parameter
     if (!freq || !channels || !*freq || !*channels) {

@@ -3023,12 +2933,9 @@ void AudioInit(void)
         }
     }
     for (u = 0; u < AudioRatesMax; ++u) {
-        Info(_("audio: %6dHz supports %d %d %d %d %d %d %d %d channels\n"),
-            AudioRatesTable[u], AudioChannelMatrix[u][1],
-            AudioChannelMatrix[u][2], AudioChannelMatrix[u][3],
-            AudioChannelMatrix[u][4], AudioChannelMatrix[u][5],
-            AudioChannelMatrix[u][6], AudioChannelMatrix[u][7],
-            AudioChannelMatrix[u][8]);
+        Info(_("audio: %6dHz supports %d %d %d %d %d %d %d %d channels\n"), AudioRatesTable[u],
+            AudioChannelMatrix[u][1], AudioChannelMatrix[u][2], AudioChannelMatrix[u][3], AudioChannelMatrix[u][4],
+            AudioChannelMatrix[u][5], AudioChannelMatrix[u][6], AudioChannelMatrix[u][7], AudioChannelMatrix[u][8]);
     }
 #ifdef USE_AUDIO_THREAD
     if (AudioUsedModule->Thread) {      // supports threads

@@ -3101,8 +3008,7 @@ static void PrintVersion(void)
 #ifdef GIT_REV
         "(GIT-" GIT_REV ")"
 #endif
-        ",\n\t(c) 2009 - 2013 by Johns\n"
-        "\tLicense AGPLv3: GNU Affero General Public License version 3\n");
+        ",\n\t(c) 2009 - 2013 by Johns\n" "\tLicense AGPLv3: GNU Affero General Public License version 3\n");
 }

 /**

@@ -3110,8 +3016,7 @@ static void PrintVersion(void)
 */
 static void PrintUsage(void)
 {
-    printf("Usage: audio_test [-?dhv]\n"
-        "\t-d\tenable debug, more -d increase the verbosity\n"
+    printf("Usage: audio_test [-?dhv]\n" "\t-d\tenable debug, more -d increase the verbosity\n"
         "\t-? -h\tdisplay this message\n" "\t-v\tdisplay version information\n"
         "Only idiots print usage on stderr!\n");
 }
339 codec.c
@@ -100,6 +100,7 @@ static pthread_mutex_t CodecLockMutex;
/// Flag prefer fast channel switch
char CodecUsePossibleDefectFrames;
AVBufferRef *hw_device_ctx;

//----------------------------------------------------------------------------
// Video
//----------------------------------------------------------------------------
@@ -137,11 +138,11 @@ struct _video_decoder_
** valid format, the formats are ordered by
** quality.
*/
-static enum AVPixelFormat Codec_get_format(AVCodecContext * video_ctx,
-const enum AVPixelFormat *fmt)
+static enum AVPixelFormat Codec_get_format(AVCodecContext * video_ctx, const enum AVPixelFormat *fmt)
{
VideoDecoder *decoder;
enum AVPixelFormat fmt1;

decoder = video_ctx->opaque;

// bug in ffmpeg 1.1.1, called with zero width or height
@@ -149,10 +150,9 @@ static enum AVPixelFormat Codec_get_format(AVCodecContext * video_ctx,
Error("codec/video: ffmpeg/libav buggy: width or height zero\n");
}

// decoder->GetFormatDone = 1;
return Video_get_format(decoder->HwDecoder, video_ctx, fmt);


}

//static void Codec_free_buffer(void *opaque, uint8_t *data);
@@ -181,9 +181,8 @@ static int Codec_get_buffer2(AVCodecContext * video_ctx, AVFrame * frame, int fl
Codec_get_format(video_ctx, fmts);
}
#if 0
-if (decoder->hwaccel_get_buffer && (AV_PIX_FMT_VDPAU == decoder->hwaccel_pix_fmt ||
-AV_PIX_FMT_CUDA == decoder->hwaccel_pix_fmt ||
-AV_PIX_FMT_VAAPI == decoder->hwaccel_pix_fmt)) {
+if (decoder->hwaccel_get_buffer && (AV_PIX_FMT_VDPAU == decoder->hwaccel_pix_fmt
+|| AV_PIX_FMT_CUDA == decoder->hwaccel_pix_fmt || AV_PIX_FMT_VAAPI == decoder->hwaccel_pix_fmt)) {
//Debug(3,"hwaccel get_buffer\n");
return decoder->hwaccel_get_buffer(video_ctx, frame, flags);
}
@@ -225,7 +224,6 @@ void CodecVideoDelDecoder(VideoDecoder * decoder)
free(decoder);
}


/**
** Open video decoder.
**
@@ -236,7 +234,7 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
{
AVCodec *video_codec;
const char *name;
-int ret,deint=2;
+int ret, deint = 2;

Debug(3, "***************codec: Video Open using video codec ID %#06x (%s)\n", codec_id,
avcodec_get_name(codec_id));
@@ -263,8 +261,8 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
#endif
if (name && (video_codec = avcodec_find_decoder_by_name(name))) {
Debug(3, "codec: decoder found\n");
-} else if ((video_codec = avcodec_find_decoder(codec_id))==NULL) {
-Debug(3,"Decoder %s not supported %p\n",name,video_codec);
+} else if ((video_codec = avcodec_find_decoder(codec_id)) == NULL) {
+Debug(3, "Decoder %s not supported %p\n", name, video_codec);
Fatal(_(" No decoder found"));
}

@@ -280,7 +278,6 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
}
decoder->VideoCtx->hw_device_ctx = av_buffer_ref(HwDeviceContext);


// FIXME: for software decoder use all cpus, otherwise 1
decoder->VideoCtx->thread_count = 1;

@@ -289,7 +286,6 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
decoder->VideoCtx->framerate.num = 50;
decoder->VideoCtx->framerate.den = 1;


pthread_mutex_lock(&CodecLockMutex);
// open codec
#ifdef YADIF
@@ -298,64 +294,61 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
#ifdef VAAPI
decoder->VideoCtx->extra_hw_frames = 8; // VIDEO_SURFACES_MAX +1
if (video_codec->capabilities & (AV_CODEC_CAP_AUTO_THREADS)) {
-Debug(3,"codec: auto threads enabled");
+Debug(3, "codec: auto threads enabled");
decoder->VideoCtx->thread_count = 0;
}

if (video_codec->capabilities & AV_CODEC_CAP_TRUNCATED) {
-Debug(3,"codec: supports truncated packets");
+Debug(3, "codec: supports truncated packets");
//decoder->VideoCtx->flags |= CODEC_FLAG_TRUNCATED;
}
// FIXME: own memory management for video frames.
if (video_codec->capabilities & AV_CODEC_CAP_DR1) {
-Debug(3,"codec: can use own buffer management");
+Debug(3, "codec: can use own buffer management");
}
if (video_codec->capabilities & AV_CODEC_CAP_FRAME_THREADS) {
-Debug(3,"codec: supports frame threads");
+Debug(3, "codec: supports frame threads");
decoder->VideoCtx->thread_count = 0;
// decoder->VideoCtx->thread_type |= FF_THREAD_FRAME;
}
if (video_codec->capabilities & AV_CODEC_CAP_SLICE_THREADS) {
-Debug(3,"codec: supports slice threads");
+Debug(3, "codec: supports slice threads");
decoder->VideoCtx->thread_count = 0;
// decoder->VideoCtx->thread_type |= FF_THREAD_SLICE;
}
-if (av_opt_set_int(decoder->VideoCtx, "refcounted_frames", 1, 0)<0)
+if (av_opt_set_int(decoder->VideoCtx, "refcounted_frames", 1, 0) < 0)
Fatal(_("VAAPI Refcounts invalid\n"));
decoder->VideoCtx->thread_safe_callbacks = 0;
#endif



#ifdef CUVID
-if (strcmp(decoder->VideoCodec->long_name,"Nvidia CUVID MPEG2VIDEO decoder") == 0) { // deinterlace for mpeg2 is somehow broken
-if (av_opt_set_int(decoder->VideoCtx->priv_data, "deint", deint ,0) < 0) { // adaptive
+if (strcmp(decoder->VideoCodec->long_name, "Nvidia CUVID MPEG2VIDEO decoder") == 0) { // deinterlace for mpeg2 is somehow broken
+if (av_opt_set_int(decoder->VideoCtx->priv_data, "deint", deint, 0) < 0) { // adaptive
pthread_mutex_unlock(&CodecLockMutex);
Fatal(_("codec: can't set option deint to video codec!\n"));
}
#if 1
-if (av_opt_set_int(decoder->VideoCtx->priv_data, "surfaces", 9 ,0) < 0) {
+if (av_opt_set_int(decoder->VideoCtx->priv_data, "surfaces", 9, 0) < 0) {
pthread_mutex_unlock(&CodecLockMutex);
Fatal(_("codec: can't set option surfces to video codec!\n"));
}
#endif
-if (av_opt_set(decoder->VideoCtx->priv_data, "drop_second_field", "false" ,0) < 0) {
+if (av_opt_set(decoder->VideoCtx->priv_data, "drop_second_field", "false", 0) < 0) {
pthread_mutex_unlock(&CodecLockMutex);
Fatal(_("codec: can't set option drop 2.field to video codec!\n"));
}
-}
-else if (strstr(decoder->VideoCodec->long_name,"Nvidia CUVID") != NULL) {
-if (av_opt_set_int(decoder->VideoCtx->priv_data, "deint", deint ,0) < 0) { // adaptive
+} else if (strstr(decoder->VideoCodec->long_name, "Nvidia CUVID") != NULL) {
+if (av_opt_set_int(decoder->VideoCtx->priv_data, "deint", deint, 0) < 0) { // adaptive
pthread_mutex_unlock(&CodecLockMutex);
Fatal(_("codec: can't set option deint to video codec!\n"));
}
#if 1
-if (av_opt_set_int(decoder->VideoCtx->priv_data, "surfaces", 13 ,0) < 0) {
+if (av_opt_set_int(decoder->VideoCtx->priv_data, "surfaces", 13, 0) < 0) {
pthread_mutex_unlock(&CodecLockMutex);
Fatal(_("codec: can't set option surfces to video codec!\n"));
}
#endif
-if (av_opt_set(decoder->VideoCtx->priv_data, "drop_second_field", "false" ,0) < 0) {
+if (av_opt_set(decoder->VideoCtx->priv_data, "drop_second_field", "false", 0) < 0) {
pthread_mutex_unlock(&CodecLockMutex);
Fatal(_("codec: can't set option drop 2.field to video codec!\n"));
}
@@ -366,7 +359,7 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
pthread_mutex_unlock(&CodecLockMutex);
Fatal(_("codec: can't open video codec!\n"));
}
-Debug(3," Codec open %d\n",ret);
+Debug(3, " Codec open %d\n", ret);

pthread_mutex_unlock(&CodecLockMutex);

@@ -384,7 +377,6 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
decoder->VideoCtx->draw_horiz_band = NULL;
decoder->VideoCtx->hwaccel_context = VideoGetHwAccelContext(decoder->HwDecoder);


//
// Prepare frame buffer for decoder
//
@@ -401,25 +393,25 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
#endif
}


/**
** Close video decoder.
**
** @param video_decoder private video decoder
*/
-void CodecVideoClose(VideoDecoder *video_decoder)
+void CodecVideoClose(VideoDecoder * video_decoder)
{
AVFrame *frame;

// FIXME: play buffered data
// av_frame_free(&video_decoder->Frame); // callee does checks

-Debug(3,"CodecVideoClose\n");
+Debug(3, "CodecVideoClose\n");
if (video_decoder->VideoCtx) {
pthread_mutex_lock(&CodecLockMutex);
#if 1
frame = av_frame_alloc();
avcodec_send_packet(video_decoder->VideoCtx, NULL);
-while (avcodec_receive_frame(video_decoder->VideoCtx,frame) >= 0);
+while (avcodec_receive_frame(video_decoder->VideoCtx, frame) >= 0) ;
av_frame_free(&frame);
#endif
avcodec_close(video_decoder->VideoCtx);
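CodecVideoClose() above drains the decoder before closing it: a NULL packet switches libavcodec into draining mode and the loop discards whatever frames are still queued. A self-contained sketch of that idiom, independent of the plugin's own decoder type:

    // Sketch of the drain idiom used in CodecVideoClose(): send a NULL packet
    // to enter draining mode, then pull and drop the remaining frames.
    #include <libavcodec/avcodec.h>

    static void drain_decoder(AVCodecContext *ctx)
    {
        AVFrame *frame = av_frame_alloc();

        if (!frame) {
            return;
        }
        avcodec_send_packet(ctx, NULL);         // enter draining mode
        while (avcodec_receive_frame(ctx, frame) >= 0) {
            av_frame_unref(frame);              // discard the flushed frames
        }
        av_frame_free(&frame);
    }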
@@ -454,9 +446,8 @@ void DisplayPts(AVCodecContext * video_ctx, AVFrame * frame)
}
ms_delay = (1000 * video_ctx->time_base.num) / video_ctx->time_base.den;
ms_delay += frame->repeat_pict * ms_delay / 2;
-printf("codec: PTS %s%s %" PRId64 " %d %d/%d %d/%d %dms\n",
-frame->repeat_pict ? "r" : " ", frame->interlaced_frame ? "I" : " ",
-pts, (int)(pts - last_pts) / 90, video_ctx->time_base.num,
+printf("codec: PTS %s%s %" PRId64 " %d %d/%d %d/%d %dms\n", frame->repeat_pict ? "r" : " ",
+frame->interlaced_frame ? "I" : " ", pts, (int)(pts - last_pts) / 90, video_ctx->time_base.num,
video_ctx->time_base.den, video_ctx->framerate.num, video_ctx->framerate.den, ms_delay);

if (pts != (int64_t) AV_NOPTS_VALUE) {
@@ -473,9 +464,10 @@ void DisplayPts(AVCodecContext * video_ctx, AVFrame * frame)
** @param avpkt video packet
*/
extern int CuvidTestSurfaces();

#ifdef YADIF
-extern int init_filters(AVCodecContext * dec_ctx,void * decoder,AVFrame *frame);
-extern int push_filters(AVCodecContext * dec_ctx,void * decoder,AVFrame *frame);
+extern int init_filters(AVCodecContext * dec_ctx, void *decoder, AVFrame * frame);
+extern int push_filters(AVCodecContext * dec_ctx, void *decoder, AVFrame * frame);
#endif
#ifdef VAAPI
void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
@@ -490,36 +482,34 @@ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
*pkt = *avpkt; // use copy
ret = avcodec_send_packet(video_ctx, pkt);
if (ret < 0) {
-Debug(4,"codec: sending video packet failed");
+Debug(4, "codec: sending video packet failed");
return;
}
frame = av_frame_alloc();
ret = avcodec_receive_frame(video_ctx, frame);
if (ret < 0 && ret != AVERROR(EAGAIN) && ret != AVERROR_EOF) {
-Debug(4,"codec: receiving video frame failed");
+Debug(4, "codec: receiving video frame failed");
av_frame_free(&frame);
return;
}
if (ret >= 0) {
-if (decoder->filter ) {
+if (decoder->filter) {
if (decoder->filter == 1) {
-if (init_filters(video_ctx,decoder->HwDecoder,frame) < 0) {
-Debug(3,"video: Init of VAAPI deint Filter failed\n");
+if (init_filters(video_ctx, decoder->HwDecoder, frame) < 0) {
+Debug(3, "video: Init of VAAPI deint Filter failed\n");
decoder->filter = 0;
-}
-else {
-Debug(3,"Init VAAPI deint ok\n");
+} else {
+Debug(3, "Init VAAPI deint ok\n");
decoder->filter = 2;
}
}
if (frame->interlaced_frame && decoder->filter == 2 && (frame->height != 720)) { // broken ZDF sends Interlaced flag
-ret = push_filters(video_ctx,decoder->HwDecoder,frame);
+ret = push_filters(video_ctx, decoder->HwDecoder, frame);
return;
}
}
VideoRenderFrame(decoder->HwDecoder, video_ctx, frame);
-}
-else {
+} else {
av_frame_free(&frame);
}
}
@@ -530,16 +520,14 @@ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
{
AVCodecContext *video_ctx;
-AVFrame *frame
-;
-int ret,ret1;
+AVFrame *frame;
+int ret, ret1;
int got_frame;
int consumed = 0;
static uint64_t first_time = 0;
const AVPacket *pkt;

next_part:
video_ctx = decoder->VideoCtx;

pkt = avpkt; // use copy
@@ -566,26 +554,24 @@ next_part:
ret = avcodec_receive_frame(video_ctx, frame); // get new frame
if (ret >= 0) { // one is avail.
got_frame = 1;
-}
-else {
+} else {
got_frame = 0;
}
// printf("got %s packet from decoder\n",got_frame?"1":"no");
if (got_frame) { // frame completed
#ifdef YADIF
-if (decoder->filter ) {
+if (decoder->filter) {
if (decoder->filter == 1) {
-if (init_filters(video_ctx,decoder->HwDecoder,frame) < 0) {
+if (init_filters(video_ctx, decoder->HwDecoder, frame) < 0) {
Fatal(_("video: Init of YADIF Filter failed\n"));
decoder->filter = 0;
-}
-else {
-Debug(3,"Init YADIF ok\n");
+} else {
+Debug(3, "Init YADIF ok\n");
decoder->filter = 2;
}
}
if (frame->interlaced_frame && decoder->filter == 2 && (frame->height != 720)) { // broken ZDF sends Interlaced flag
-ret = push_filters(video_ctx,decoder->HwDecoder,frame);
+ret = push_filters(video_ctx, decoder->HwDecoder, frame);
// av_frame_unref(frame);
continue;
}
@@ -757,8 +743,7 @@ void CodecAudioOpen(AudioDecoder * audio_decoder, int codec_id)
{
AVCodec *audio_codec;

-Debug(3, "codec: using audio codec ID %#06x (%s)\n", codec_id,
-avcodec_get_name(codec_id));
+Debug(3, "codec: using audio codec ID %#06x (%s)\n", codec_id, avcodec_get_name(codec_id));
if (!(audio_codec = avcodec_find_decoder(codec_id))) {
// if (!(audio_codec = avcodec_find_decoder(codec_id))) {
Fatal(_("codec: codec ID %#06x not found\n"), codec_id);
@@ -771,8 +756,7 @@ void CodecAudioOpen(AudioDecoder * audio_decoder, int codec_id)
}

if (CodecDownmix) {
-audio_decoder->AudioCtx->request_channel_layout =
-AV_CH_LAYOUT_STEREO_DOWNMIX;
+audio_decoder->AudioCtx->request_channel_layout = AV_CH_LAYOUT_STEREO_DOWNMIX;
}
pthread_mutex_lock(&CodecLockMutex);
// open codec
@@ -793,7 +777,6 @@ void CodecAudioOpen(AudioDecoder * audio_decoder, int codec_id)
pthread_mutex_unlock(&CodecLockMutex);
Debug(3, "codec: audio '%s'\n", audio_decoder->AudioCodec->long_name);


audio_decoder->SampleRate = 0;
audio_decoder->Channels = 0;
audio_decoder->HwSampleRate = 0;
@@ -954,19 +937,16 @@ static void CodecReorderAudioFrame(int16_t * buf, int size, int channels)
** @param audio_decoder audio decoder data
** @param[out] passthrough pass-through output
*/
-static int CodecAudioUpdateHelper(AudioDecoder * audio_decoder,
-int *passthrough)
+static int CodecAudioUpdateHelper(AudioDecoder * audio_decoder, int *passthrough)
{
const AVCodecContext *audio_ctx;
int err;

audio_ctx = audio_decoder->AudioCtx;
Debug(3, "codec/audio: format change %s %dHz *%d channels%s%s%s%s%s\n",
-av_get_sample_fmt_name(audio_ctx->sample_fmt), audio_ctx->sample_rate,
-audio_ctx->channels, CodecPassthrough & CodecPCM ? " PCM" : "",
-CodecPassthrough & CodecMPA ? " MPA" : "",
-CodecPassthrough & CodecAC3 ? " AC-3" : "",
-CodecPassthrough & CodecEAC3 ? " E-AC-3" : "",
+av_get_sample_fmt_name(audio_ctx->sample_fmt), audio_ctx->sample_rate, audio_ctx->channels,
+CodecPassthrough & CodecPCM ? " PCM" : "", CodecPassthrough & CodecMPA ? " MPA" : "",
+CodecPassthrough & CodecAC3 ? " AC-3" : "", CodecPassthrough & CodecEAC3 ? " E-AC-3" : "",
CodecPassthrough ? " pass-through" : "");

*passthrough = 0;
@@ -978,8 +958,7 @@ static int CodecAudioUpdateHelper(AudioDecoder * audio_decoder,

// SPDIF/HDMI pass-through
if ((CodecPassthrough & CodecAC3 && audio_ctx->codec_id == AV_CODEC_ID_AC3)
-|| (CodecPassthrough & CodecEAC3
-&& audio_ctx->codec_id == AV_CODEC_ID_EAC3)) {
+|| (CodecPassthrough & CodecEAC3 && audio_ctx->codec_id == AV_CODEC_ID_EAC3)) {
if (audio_ctx->codec_id == AV_CODEC_ID_EAC3) {
// E-AC-3 over HDMI some receivers need HBR
audio_decoder->HwSampleRate *= 4;
@@ -990,16 +969,12 @@ static int CodecAudioUpdateHelper(AudioDecoder * audio_decoder,
*passthrough = 1;
}
// channels/sample-rate not support?
-if ((err =
-AudioSetup(&audio_decoder->HwSampleRate,
-&audio_decoder->HwChannels, *passthrough))) {
+if ((err = AudioSetup(&audio_decoder->HwSampleRate, &audio_decoder->HwChannels, *passthrough))) {

// try E-AC-3 none HBR
audio_decoder->HwSampleRate /= 4;
if (audio_ctx->codec_id != AV_CODEC_ID_EAC3
-|| (err =
-AudioSetup(&audio_decoder->HwSampleRate,
-&audio_decoder->HwChannels, *passthrough))) {
+|| (err = AudioSetup(&audio_decoder->HwSampleRate, &audio_decoder->HwChannels, *passthrough))) {

Debug(3, "codec/audio: audio setup error\n");
// FIXME: handle errors
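For orientation, the logic being re-wrapped here first requests E-AC-3 pass-through at the HBR rate (four times the nominal sample rate, which some HDMI receivers require) and drops back to the plain rate when the audio setup rejects it. A minimal sketch of that retry, assuming a hypothetical try_setup() callback in place of the real AudioSetup():

    // Sketch of the E-AC-3 HBR fallback shown above; try_setup() is a
    // placeholder for the real AudioSetup() call and returns 0 on success.
    static int open_eac3_passthrough(int *sample_rate, int *channels,
        int (*try_setup)(int *rate, int *channels, int passthrough))
    {
        *sample_rate *= 4;                  // E-AC-3 over HDMI: some receivers need HBR
        if (try_setup(sample_rate, channels, 1)) {
            *sample_rate /= 4;              // HBR refused, retry at the nominal rate
            return try_setup(sample_rate, channels, 1);
        }
        return 0;
    }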
@@ -1009,9 +984,8 @@ static int CodecAudioUpdateHelper(AudioDecoder * audio_decoder,
}
}

-Debug(3, "codec/audio: resample %s %dHz *%d -> %s %dHz *%d\n",
-av_get_sample_fmt_name(audio_ctx->sample_fmt), audio_ctx->sample_rate,
-audio_ctx->channels, av_get_sample_fmt_name(AV_SAMPLE_FMT_S16),
+Debug(3, "codec/audio: resample %s %dHz *%d -> %s %dHz *%d\n", av_get_sample_fmt_name(audio_ctx->sample_fmt),
+audio_ctx->sample_rate, audio_ctx->channels, av_get_sample_fmt_name(AV_SAMPLE_FMT_S16),
audio_decoder->HwSampleRate, audio_decoder->HwChannels);

return 0;
@@ -1023,8 +997,7 @@ static int CodecAudioUpdateHelper(AudioDecoder * audio_decoder,
** @param audio_decoder audio decoder data
** @param avpkt undecoded audio packet
*/
-static int CodecAudioPassthroughHelper(AudioDecoder * audio_decoder,
-const AVPacket * avpkt)
+static int CodecAudioPassthroughHelper(AudioDecoder * audio_decoder, const AVPacket * avpkt)
{
#ifdef USE_PASSTHROUGH
const AVCodecContext *audio_ctx;
@@ -1045,12 +1018,10 @@ static int CodecAudioPassthroughHelper(AudioDecoder * audio_decoder,
int x;

x = (audio_decoder->DriftFrac +
-(audio_decoder->DriftCorr * spdif_sz)) / (10 *
-audio_decoder->HwSampleRate * 100);
+(audio_decoder->DriftCorr * spdif_sz)) / (10 * audio_decoder->HwSampleRate * 100);
audio_decoder->DriftFrac =
(audio_decoder->DriftFrac +
-(audio_decoder->DriftCorr * spdif_sz)) % (10 *
-audio_decoder->HwSampleRate * 100);
+(audio_decoder->DriftCorr * spdif_sz)) % (10 * audio_decoder->HwSampleRate * 100);
// round to word border
x *= audio_decoder->HwChannels * 4;
if (x < -64) { // limit correction
@@ -1081,8 +1052,7 @@ static int CodecAudioPassthroughHelper(AudioDecoder * audio_decoder,
AudioEnqueue(spdif, spdif_sz);
return 1;
}
-if (CodecPassthrough & CodecEAC3
-&& audio_ctx->codec_id == AV_CODEC_ID_EAC3) {
+if (CodecPassthrough & CodecEAC3 && audio_ctx->codec_id == AV_CODEC_ID_EAC3) {
uint16_t *spdif;
int spdif_sz;
int repeat;
@@ -1121,8 +1091,7 @@ static int CodecAudioPassthroughHelper(AudioDecoder * audio_decoder,
spdif[1] = htole16(0x4E1F);
spdif[2] = htole16(IEC61937_EAC3);
spdif[3] = htole16(audio_decoder->SpdifIndex * 8);
-memset(spdif + 4 + audio_decoder->SpdifIndex / 2, 0,
-spdif_sz - 8 - audio_decoder->SpdifIndex);
+memset(spdif + 4 + audio_decoder->SpdifIndex / 2, 0, spdif_sz - 8 - audio_decoder->SpdifIndex);

// don't play with the eac-3 samples
AudioEnqueue(spdif, spdif_sz);
@@ -1165,8 +1134,7 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
audio_decoder->LastDelay = delay;
audio_decoder->Drift = 0;
audio_decoder->DriftFrac = 0;
-Debug(3, "codec/audio: inital drift delay %" PRId64 "ms\n",
-delay / 90);
+Debug(3, "codec/audio: inital drift delay %" PRId64 "ms\n", delay / 90);
return;
}
// collect over some time
@@ -1176,12 +1144,9 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
}

tim_diff = (nowtime.tv_sec - audio_decoder->LastTime.tv_sec)
-* 1000 * 1000 * 1000 + (nowtime.tv_nsec -
-audio_decoder->LastTime.tv_nsec);
+* 1000 * 1000 * 1000 + (nowtime.tv_nsec - audio_decoder->LastTime.tv_nsec);

-drift =
-(tim_diff * 90) / (1000 * 1000) - pts_diff + delay -
-audio_decoder->LastDelay;
+drift = (tim_diff * 90) / (1000 * 1000) - pts_diff + delay - audio_decoder->LastDelay;

// adjust rounding error
nowtime.tv_nsec -= nowtime.tv_nsec % (1000 * 1000 / 90);
@@ -1190,16 +1155,13 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
audio_decoder->LastDelay = delay;

if (0) {
-Debug(3,
-"codec/audio: interval P:%5" PRId64 "ms T:%5" PRId64 "ms D:%4"
-PRId64 "ms %f %d\n", pts_diff / 90, tim_diff / (1000 * 1000),
-delay / 90, drift / 90.0, audio_decoder->DriftCorr);
+Debug(3, "codec/audio: interval P:%5" PRId64 "ms T:%5" PRId64 "ms D:%4" PRId64 "ms %f %d\n", pts_diff / 90,
+tim_diff / (1000 * 1000), delay / 90, drift / 90.0, audio_decoder->DriftCorr);
}
// underruns and av_resample have the same time :(((
if (abs(drift) > 10 * 90) {
// drift too big, pts changed?
-Debug(3, "codec/audio: drift(%6d) %3dms reset\n",
-audio_decoder->DriftCorr, drift / 90);
+Debug(3, "codec/audio: drift(%6d) %3dms reset\n", audio_decoder->DriftCorr, drift / 90);
audio_decoder->LastDelay = 0;
#ifdef DEBUG
corr = 0; // keep gcc happy
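The drift expression that these hunks re-wrap compares elapsed wall-clock time against elapsed PTS plus the change in output delay, all expressed in 90 kHz ticks. A minimal sketch of the same arithmetic with placeholder parameter names:

    // Sketch of the drift formula joined onto one line above: positive drift
    // means the audio output is running late relative to the wall clock.
    // tim_diff_ns is wall-clock nanoseconds; the other values are 90 kHz ticks.
    #include <stdint.h>

    static int64_t audio_drift_ticks(int64_t tim_diff_ns, int64_t pts_diff, int64_t delay, int64_t last_delay)
    {
        return (tim_diff_ns * 90) / (1000 * 1000) - pts_diff + delay - last_delay;
    }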
@@ -1233,15 +1195,13 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
} else {
distance = (pts_diff * audio_decoder->HwSampleRate) / (90 * 1000);
}
-av_resample_compensate(audio_decoder->AvResample,
-audio_decoder->DriftCorr / 10, distance);
+av_resample_compensate(audio_decoder->AvResample, audio_decoder->DriftCorr / 10, distance);
}
if (1) {
static int c;

if (!(c++ % 10)) {
-Debug(3, "codec/audio: drift(%6d) %8dus %5d\n",
-audio_decoder->DriftCorr, drift * 1000 / 90, corr);
+Debug(3, "codec/audio: drift(%6d) %8dus %5d\n", audio_decoder->DriftCorr, drift * 1000 / 90, corr);
}
}
}
@@ -1272,16 +1232,13 @@ static void CodecAudioUpdateFormat(AudioDecoder * audio_decoder)
audio_ctx = audio_decoder->AudioCtx;
if ((err = CodecAudioUpdateHelper(audio_decoder, &passthrough))) {

-Debug(3, "codec/audio: resample %dHz *%d -> %dHz *%d err %d\n",
-audio_ctx->sample_rate, audio_ctx->channels,
-audio_decoder->HwSampleRate, audio_decoder->HwChannels,err);
+Debug(3, "codec/audio: resample %dHz *%d -> %dHz *%d err %d\n", audio_ctx->sample_rate, audio_ctx->channels,
+audio_decoder->HwSampleRate, audio_decoder->HwChannels, err);

if (err == 1) {
audio_decoder->ReSample =
-av_audio_resample_init(audio_decoder->HwChannels,
-audio_ctx->channels, audio_decoder->HwSampleRate,
-audio_ctx->sample_rate, audio_ctx->sample_fmt,
-audio_ctx->sample_fmt, 16, 10, 0, 0.8);
+av_audio_resample_init(audio_decoder->HwChannels, audio_ctx->channels, audio_decoder->HwSampleRate,
+audio_ctx->sample_rate, audio_ctx->sample_fmt, audio_ctx->sample_fmt, 16, 10, 0, 0.8);
// libav-0.8_pre didn't support 6 -> 2 channels
if (!audio_decoder->ReSample) {
Error(_("codec/audio: resample setup error\n"));
@@ -1306,16 +1263,14 @@ static void CodecAudioUpdateFormat(AudioDecoder * audio_decoder)
Error(_("codec/audio: overwrite resample\n"));
}
audio_decoder->AvResample =
-av_resample_init(audio_decoder->HwSampleRate,
-audio_decoder->HwSampleRate, 16, 10, 0, 0.8);
+av_resample_init(audio_decoder->HwSampleRate, audio_decoder->HwSampleRate, 16, 10, 0, 0.8);
if (!audio_decoder->AvResample) {
Error(_("codec/audio: AvResample setup error\n"));
} else {
// reset drift to some default value
audio_decoder->DriftCorr /= 2;
audio_decoder->DriftFrac = 0;
-av_resample_compensate(audio_decoder->AvResample,
-audio_decoder->DriftCorr / 10,
+av_resample_compensate(audio_decoder->AvResample, audio_decoder->DriftCorr / 10,
10 * audio_decoder->HwSampleRate);
}
}
@@ -1333,8 +1288,8 @@ void CodecAudioEnqueue(AudioDecoder * audio_decoder, int16_t * data, int count)
{
#ifdef USE_AUDIO_DRIFT_CORRECTION
if ((CodecAudioDrift & CORRECT_PCM) && audio_decoder->AvResample) {
-int16_t buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 +
-AV_INPUT_BUFFER_PADDING_SIZE] __attribute__ ((aligned(16)));
+int16_t buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 + AV_INPUT_BUFFER_PADDING_SIZE]
+__attribute__((aligned(16)));
int16_t buftmp[MAX_CHANNELS][(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4];
int consumed;
int i;
@@ -1347,15 +1302,12 @@ void CodecAudioEnqueue(AudioDecoder * audio_decoder, int16_t * data, int count)
if (audio_decoder->RemainCount + bytes_n > audio_decoder->BufferSize) {
audio_decoder->BufferSize = audio_decoder->RemainCount + bytes_n;
for (ch = 0; ch < MAX_CHANNELS; ++ch) {
-audio_decoder->Buffer[ch] =
-realloc(audio_decoder->Buffer[ch],
-audio_decoder->BufferSize);
+audio_decoder->Buffer[ch] = realloc(audio_decoder->Buffer[ch], audio_decoder->BufferSize);
}
}
// copy remaining bytes into sample buffer
for (ch = 0; ch < audio_decoder->HwChannels; ++ch) {
-memcpy(audio_decoder->Buffer[ch], audio_decoder->Remain[ch],
-audio_decoder->RemainCount);
+memcpy(audio_decoder->Buffer[ch], audio_decoder->Remain[ch], audio_decoder->RemainCount);
}
// deinterleave samples into sample buffer
for (i = 0; i < bytes_n / 2; i++) {
@@ -1369,18 +1321,14 @@ void CodecAudioEnqueue(AudioDecoder * audio_decoder, int16_t * data, int count)
n = 0; // keep gcc lucky
// resample the sample buffer into tmp buffer
for (ch = 0; ch < audio_decoder->HwChannels; ++ch) {
-n = av_resample(audio_decoder->AvResample, buftmp[ch],
-audio_decoder->Buffer[ch], &consumed, bytes_n / 2,
+n = av_resample(audio_decoder->AvResample, buftmp[ch], audio_decoder->Buffer[ch], &consumed, bytes_n / 2,
sizeof(buftmp[ch]) / 2, ch == audio_decoder->HwChannels - 1);
// fixme remaining channels
if (bytes_n - consumed * 2 > audio_decoder->RemainSize) {
audio_decoder->RemainSize = bytes_n - consumed * 2;
}
-audio_decoder->Remain[ch] =
-realloc(audio_decoder->Remain[ch], audio_decoder->RemainSize);
-memcpy(audio_decoder->Remain[ch],
-audio_decoder->Buffer[ch] + consumed,
-audio_decoder->RemainSize);
+audio_decoder->Remain[ch] = realloc(audio_decoder->Remain[ch], audio_decoder->RemainSize);
+memcpy(audio_decoder->Remain[ch], audio_decoder->Buffer[ch] + consumed, audio_decoder->RemainSize);
audio_decoder->RemainCount = audio_decoder->RemainSize;
}

@@ -1406,9 +1354,7 @@ void CodecAudioEnqueue(AudioDecoder * audio_decoder, int16_t * data, int count)
AudioEnqueue(data, count);
}

-int myavcodec_decode_audio3(AVCodecContext *avctx, int16_t *samples,
-int *frame_size_ptr,
-AVPacket *avpkt)
+int myavcodec_decode_audio3(AVCodecContext * avctx, int16_t * samples, int *frame_size_ptr, AVPacket * avpkt)
{
AVFrame *frame = av_frame_alloc();
int ret, got_frame = 0;
@@ -1424,7 +1370,7 @@ int myavcodec_decode_audio3(AVCodecContext *avctx, int16_t *samples,
// into separate routines or separate threads.
// Also now that it always consumes a whole buffer some code
// in the caller may be able to be optimized.
-ret = avcodec_receive_frame(avctx,frame);
+ret = avcodec_receive_frame(avctx, frame);
if (ret == 0)
got_frame = 1;
if (ret == AVERROR(EAGAIN))
@@ -1433,27 +1379,26 @@ int myavcodec_decode_audio3(AVCodecContext *avctx, int16_t *samples,
ret = avcodec_send_packet(avctx, avpkt);
if (ret == AVERROR(EAGAIN))
ret = 0;
-else if (ret < 0)
-{
+else if (ret < 0) {
// Debug(3, "codec/audio: audio decode error: %1 (%2)\n",av_make_error_string(error, sizeof(error), ret),got_frame);
return ret;
-}
-else
+} else
ret = avpkt->size;
#endif
if (ret >= 0 && got_frame) {
-int i,ch;
+int i, ch;
int planar = av_sample_fmt_is_planar(avctx->sample_fmt);
int data_size = av_get_bytes_per_sample(avctx->sample_fmt);

if (data_size < 0) {
/* This should not occur, checking just for paranoia */
fprintf(stderr, "Failed to calculate data size\n");
exit(1);
}
-for (i=0; i<frame->nb_samples; i++) {
-for (ch=0; ch < avctx->channels; ch++) {
-memcpy(samples,frame->extended_data[ch]+data_size*i,data_size);
-samples = (char *) samples + data_size;
+for (i = 0; i < frame->nb_samples; i++) {
+for (ch = 0; ch < avctx->channels; ch++) {
+memcpy(samples, frame->extended_data[ch] + data_size * i, data_size);
+samples = (char *)samples + data_size;
}
}
//Debug(3,"data_size %d nb_samples %d sample_fmt %d channels %d planar %d\n",data_size,frame->nb_samples,avctx->sample_fmt,avctx->channels,planar);
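myavcodec_decode_audio3() above wraps the send/receive API and re-interleaves planar samples into one output buffer. A stripped-down sketch of that receive-then-send order and the interleaving loop (error handling reduced to the essentials, names chosen here for illustration):

    // Sketch of the pattern used by myavcodec_decode_audio3(): receive a
    // pending frame first, feed the new packet, then interleave the planes.
    #include <libavcodec/avcodec.h>
    #include <libavutil/samplefmt.h>
    #include <string.h>

    static int decode_audio_interleaved(AVCodecContext *avctx, const AVPacket *pkt, uint8_t *out)
    {
        AVFrame *frame = av_frame_alloc();
        int ret, got_frame;

        if (!frame) {
            return AVERROR(ENOMEM);
        }
        got_frame = avcodec_receive_frame(avctx, frame) == 0;
        ret = avcodec_send_packet(avctx, pkt);
        if (ret < 0 && ret != AVERROR(EAGAIN)) {
            av_frame_free(&frame);
            return ret;
        }
        if (got_frame) {
            int data_size = av_get_bytes_per_sample(avctx->sample_fmt);

            for (int i = 0; i < frame->nb_samples; i++) {
                for (int ch = 0; ch < avctx->channels; ch++) {
                    memcpy(out, frame->extended_data[ch] + (size_t) data_size * i, data_size);
                    out += data_size;
                }
            }
        }
        av_frame_free(&frame);
        return got_frame;
    }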
@@ -1463,8 +1408,7 @@ int myavcodec_decode_audio3(AVCodecContext *avctx, int16_t *samples,
}
av_frame_free(&frame);
return ret;
}


/**
** Decode an audio packet.
@@ -1476,8 +1420,7 @@ int myavcodec_decode_audio3(AVCodecContext *avctx, int16_t *samples,
*/
void CodecAudioDecode(AudioDecoder * audio_decoder, const AVPacket * avpkt)
{
-int16_t buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 +
-AV_INPUT_BUFFER_PADDING_SIZE] __attribute__ ((aligned(16)));
+int16_t buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 + AV_INPUT_BUFFER_PADDING_SIZE] __attribute__((aligned(16)));
int buf_sz;
int l;
AVCodecContext *audio_ctx;
@@ -1503,8 +1446,7 @@ void CodecAudioDecode(AudioDecoder * audio_decoder, const AVPacket * avpkt)
CodecAudioSetClock(audio_decoder, avpkt->pts);
}
// FIXME: must first play remainings bytes, than change and play new.
-if (audio_decoder->Passthrough != CodecPassthrough
-|| audio_decoder->SampleRate != audio_ctx->sample_rate
+if (audio_decoder->Passthrough != CodecPassthrough || audio_decoder->SampleRate != audio_ctx->sample_rate
|| audio_decoder->Channels != audio_ctx->channels) {
CodecAudioUpdateFormat(audio_decoder);
}
@@ -1512,9 +1454,8 @@ void CodecAudioDecode(AudioDecoder * audio_decoder, const AVPacket * avpkt)
if (audio_decoder->HwSampleRate && audio_decoder->HwChannels) {
// need to resample audio
if (audio_decoder->ReSample) {
-int16_t outbuf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 +
-AV_INPUT_BUFFER_PADDING_SIZE]
-__attribute__ ((aligned(16)));
+int16_t outbuf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 + AV_INPUT_BUFFER_PADDING_SIZE]
+__attribute__((aligned(16)));
int outlen;

// FIXME: libav-0.7.2 crash here
@@ -1526,11 +1467,8 @@ void CodecAudioDecode(AudioDecoder * audio_decoder, const AVPacket * avpkt)
#endif
if (outlen) {
// outlen seems to be wrong in ffmpeg-0.9
-outlen /= audio_decoder->Channels *
-av_get_bytes_per_sample(audio_ctx->sample_fmt);
-outlen *=
-audio_decoder->HwChannels *
-av_get_bytes_per_sample(audio_ctx->sample_fmt);
+outlen /= audio_decoder->Channels * av_get_bytes_per_sample(audio_ctx->sample_fmt);
+outlen *= audio_decoder->HwChannels * av_get_bytes_per_sample(audio_ctx->sample_fmt);
Debug(4, "codec/audio: %d -> %d\n", buf_sz, outlen);
CodecAudioEnqueue(audio_decoder, outbuf, outlen);
}
@@ -1577,8 +1515,7 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
audio_decoder->LastDelay = delay;
audio_decoder->Drift = 0;
audio_decoder->DriftFrac = 0;
-Debug(3, "codec/audio: inital drift delay %" PRId64 "ms\n",
-delay / 90);
+Debug(3, "codec/audio: inital drift delay %" PRId64 "ms\n", delay / 90);
return;
}
// collect over some time
@@ -1588,12 +1525,9 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
}

tim_diff = (nowtime.tv_sec - audio_decoder->LastTime.tv_sec)
-* 1000 * 1000 * 1000 + (nowtime.tv_nsec -
-audio_decoder->LastTime.tv_nsec);
+* 1000 * 1000 * 1000 + (nowtime.tv_nsec - audio_decoder->LastTime.tv_nsec);

-drift =
-(tim_diff * 90) / (1000 * 1000) - pts_diff + delay -
-audio_decoder->LastDelay;
+drift = (tim_diff * 90) / (1000 * 1000) - pts_diff + delay - audio_decoder->LastDelay;

// adjust rounding error
nowtime.tv_nsec -= nowtime.tv_nsec % (1000 * 1000 / 90);
@@ -1602,16 +1536,13 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
audio_decoder->LastDelay = delay;

if (0) {
-Debug(3,
-"codec/audio: interval P:%5" PRId64 "ms T:%5" PRId64 "ms D:%4"
-PRId64 "ms %f %d\n", pts_diff / 90, tim_diff / (1000 * 1000),
-delay / 90, drift / 90.0, audio_decoder->DriftCorr);
+Debug(3, "codec/audio: interval P:%5" PRId64 "ms T:%5" PRId64 "ms D:%4" PRId64 "ms %f %d\n", pts_diff / 90,
+tim_diff / (1000 * 1000), delay / 90, drift / 90.0, audio_decoder->DriftCorr);
}
// underruns and av_resample have the same time :(((
if (abs(drift) > 10 * 90) {
// drift too big, pts changed?
-Debug(3, "codec/audio: drift(%6d) %3dms reset\n",
-audio_decoder->DriftCorr, drift / 90);
+Debug(3, "codec/audio: drift(%6d) %3dms reset\n", audio_decoder->DriftCorr, drift / 90);
audio_decoder->LastDelay = 0;
#ifdef DEBUG
corr = 0; // keep gcc happy
@@ -1646,8 +1577,7 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
} else {
distance = (pts_diff * audio_decoder->HwSampleRate) / (90 * 1000);
}
-if (swr_set_compensation(audio_decoder->Resample,
-audio_decoder->DriftCorr / 10, distance)) {
+if (swr_set_compensation(audio_decoder->Resample, audio_decoder->DriftCorr / 10, distance)) {
Debug(3, "codec/audio: swr_set_compensation failed\n");
}
}
@@ -1657,8 +1587,7 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
int distance;

distance = (pts_diff * audio_decoder->HwSampleRate) / (900 * 1000);
-if (avresample_set_compensation(audio_decoder->Resample,
-audio_decoder->DriftCorr / 10, distance)) {
+if (avresample_set_compensation(audio_decoder->Resample, audio_decoder->DriftCorr / 10, distance)) {
Debug(3, "codec/audio: swr_set_compensation failed\n");
}
}
@@ -1667,8 +1596,7 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
static int c;

if (!(c++ % 10)) {
-Debug(3, "codec/audio: drift(%6d) %8dus %5d\n",
-audio_decoder->DriftCorr, drift * 1000 / 90, corr);
+Debug(3, "codec/audio: drift(%6d) %8dus %5d\n", audio_decoder->DriftCorr, drift * 1000 / 90, corr);
}
}
#else
@@ -1697,8 +1625,7 @@ static void CodecAudioUpdateFormat(AudioDecoder * audio_decoder)
audio_ctx = audio_decoder->AudioCtx;

#ifdef DEBUG
-if (audio_ctx->sample_fmt == AV_SAMPLE_FMT_S16
-&& audio_ctx->sample_rate == audio_decoder->HwSampleRate
+if (audio_ctx->sample_fmt == AV_SAMPLE_FMT_S16 && audio_ctx->sample_rate == audio_decoder->HwSampleRate
&& !CodecAudioDrift) {
// FIXME: use Resample only, when it is needed!
fprintf(stderr, "no resample needed\n");
@@ -1707,10 +1634,9 @@ static void CodecAudioUpdateFormat(AudioDecoder * audio_decoder)

#ifdef USE_SWRESAMPLE
audio_decoder->Resample =
-swr_alloc_set_opts(audio_decoder->Resample, audio_ctx->channel_layout,
-AV_SAMPLE_FMT_S16, audio_decoder->HwSampleRate,
-audio_ctx->channel_layout, audio_ctx->sample_fmt,
-audio_ctx->sample_rate, 0, NULL);
+swr_alloc_set_opts(audio_decoder->Resample, audio_ctx->channel_layout, AV_SAMPLE_FMT_S16,
+audio_decoder->HwSampleRate, audio_ctx->channel_layout, audio_ctx->sample_fmt, audio_ctx->sample_rate, 0,
+NULL);
if (audio_decoder->Resample) {
swr_init(audio_decoder->Resample);
} else {
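The swr_alloc_set_opts() call re-wrapped here builds a libswresample context that converts the decoder's output format into interleaved S16 at the hardware sample rate. A small self-contained sketch of that setup (the helper name and the error handling are illustrative, not the plugin's):

    // Sketch: allocate and init an swresample context that converts the
    // decoder format to interleaved S16, as CodecAudioUpdateFormat() does.
    #include <libavcodec/avcodec.h>
    #include <libswresample/swresample.h>

    static SwrContext *make_s16_resampler(const AVCodecContext *audio_ctx, int out_sample_rate)
    {
        SwrContext *swr = swr_alloc_set_opts(NULL,
            audio_ctx->channel_layout, AV_SAMPLE_FMT_S16, out_sample_rate,              // output side
            audio_ctx->channel_layout, audio_ctx->sample_fmt, audio_ctx->sample_rate,   // input side
            0, NULL);

        if (swr && swr_init(swr) < 0) {
            swr_free(&swr);                 // init failed, swr is now NULL
        }
        return swr;
    }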
@ -1723,18 +1649,12 @@ static void CodecAudioUpdateFormat(AudioDecoder * audio_decoder)
|
|||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
av_opt_set_int(audio_decoder->Resample, "in_channel_layout",
|
av_opt_set_int(audio_decoder->Resample, "in_channel_layout", audio_ctx->channel_layout, 0);
|
||||||
audio_ctx->channel_layout, 0);
|
av_opt_set_int(audio_decoder->Resample, "in_sample_fmt", audio_ctx->sample_fmt, 0);
|
||||||
av_opt_set_int(audio_decoder->Resample, "in_sample_fmt",
|
av_opt_set_int(audio_decoder->Resample, "in_sample_rate", audio_ctx->sample_rate, 0);
|
||||||
audio_ctx->sample_fmt, 0);
|
av_opt_set_int(audio_decoder->Resample, "out_channel_layout", audio_ctx->channel_layout, 0);
|
||||||
av_opt_set_int(audio_decoder->Resample, "in_sample_rate",
|
av_opt_set_int(audio_decoder->Resample, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0);
|
||||||
audio_ctx->sample_rate, 0);
|
av_opt_set_int(audio_decoder->Resample, "out_sample_rate", audio_decoder->HwSampleRate, 0);
|
||||||
av_opt_set_int(audio_decoder->Resample, "out_channel_layout",
|
|
||||||
audio_ctx->channel_layout, 0);
|
|
||||||
av_opt_set_int(audio_decoder->Resample, "out_sample_fmt",
|
|
||||||
AV_SAMPLE_FMT_S16, 0);
|
|
||||||
av_opt_set_int(audio_decoder->Resample, "out_sample_rate",
|
|
||||||
audio_decoder->HwSampleRate, 0);
|
|
||||||
|
|
||||||
if (avresample_open(audio_decoder->Resample)) {
|
if (avresample_open(audio_decoder->Resample)) {
|
||||||
avresample_free(&audio_decoder->Resample);
|
avresample_free(&audio_decoder->Resample);
|
||||||
@ -1799,7 +1719,8 @@ void CodecAudioDecode(AudioDecoder * audio_decoder, const AVPacket * avpkt)
|
|||||||
uint8_t *out[1];
|
uint8_t *out[1];
|
||||||
|
|
||||||
out[0] = outbuf;
|
out[0] = outbuf;
|
||||||
ret = swr_convert(audio_decoder->Resample, out, sizeof(outbuf) / (2 * audio_decoder->HwChannels),
|
ret =
|
||||||
|
swr_convert(audio_decoder->Resample, out, sizeof(outbuf) / (2 * audio_decoder->HwChannels),
|
||||||
(const uint8_t **)frame->extended_data, frame->nb_samples);
|
(const uint8_t **)frame->extended_data, frame->nb_samples);
|
||||||
if (ret > 0) {
|
if (ret > 0) {
|
||||||
if (!(audio_decoder->Passthrough & CodecPCM)) {
|
if (!(audio_decoder->Passthrough & CodecPCM)) {
|
||||||
@ -1833,10 +1754,10 @@ void CodecAudioFlushBuffers(AudioDecoder * decoder)
|
|||||||
/**
|
/**
|
||||||
** Empty log callback
|
** Empty log callback
|
||||||
*/
|
*/
|
||||||
static void CodecNoopCallback( __attribute__ ((unused))
|
static void CodecNoopCallback( __attribute__((unused))
|
||||||
void *ptr, __attribute__ ((unused))
|
void *ptr, __attribute__((unused))
|
||||||
int level, __attribute__ ((unused))
|
int level, __attribute__((unused))
|
||||||
const char *fmt, __attribute__ ((unused)) va_list vl)
|
const char *fmt, __attribute__((unused)) va_list vl)
|
||||||
{
|
{
|
||||||
}
|
}
|
||||||
|
|
||||||
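For context on the hunks above: they only reflow the existing libswresample calls (swr_alloc_set_opts, swr_init, swr_convert) onto longer lines. The following is a minimal, self-contained sketch of that same API pattern, not code from this plugin; ConvertToS16, resample, out_rate and outbuf are illustrative names.

    /* Sketch: convert one decoded AVFrame to interleaved S16 with libswresample. */
    #include <stdint.h>
    #include <libavutil/frame.h>
    #include <libswresample/swresample.h>

    static int ConvertToS16(SwrContext **resample, const AVFrame *frame, int out_rate, uint8_t *outbuf, int outbuf_size)
    {
        uint8_t *out[1] = { outbuf };

        if (!*resample) {               // lazily create and configure the context
            *resample = swr_alloc_set_opts(NULL, frame->channel_layout, AV_SAMPLE_FMT_S16, out_rate,
                frame->channel_layout, (enum AVSampleFormat)frame->format, frame->sample_rate, 0, NULL);
            if (!*resample || swr_init(*resample) < 0) {
                return -1;              // setup failed
            }
        }
        // returns samples written per channel, or a negative error code
        return swr_convert(*resample, out, outbuf_size / (2 * frame->channels),
            (const uint8_t **)frame->extended_data, frame->nb_samples);
    }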
 12	codec.h
@@ -35,7 +35,8 @@

 #define AVCODEC_MAX_AUDIO_FRAME_SIZE 192000

-enum HWAccelID {
+enum HWAccelID
+{
     HWACCEL_NONE = 0,
     HWACCEL_AUTO,
     HWACCEL_VDPAU,
@@ -48,6 +49,7 @@ enum HWAccelID {
 };

 extern AVBufferRef *hw_device_ctx;
+
 ///
 /// Video decoder structure.
 ///
@@ -73,9 +75,9 @@ struct _video_decoder_
     /* hwaccel context */
     enum HWAccelID active_hwaccel_id;
     void *hwaccel_ctx;
-    void (*hwaccel_uninit)(AVCodecContext *s);
-    int (*hwaccel_get_buffer)(AVCodecContext *s, AVFrame *frame, int flags);
-    int (*hwaccel_retrieve_data)(AVCodecContext *s, AVFrame *frame);
+    void (*hwaccel_uninit)(AVCodecContext * s);
+    int (*hwaccel_get_buffer)(AVCodecContext * s, AVFrame * frame, int flags);
+    int (*hwaccel_retrieve_data)(AVCodecContext * s, AVFrame * frame);
     enum AVPixelFormat hwaccel_pix_fmt;
     enum AVPixelFormat hwaccel_retrieved_pix_fmt;
     AVBufferRef *hw_frames_ctx;
@@ -86,8 +88,6 @@ struct _video_decoder_
     // From VO
     struct mp_hwdec_devices *hwdec_devs;

-
-
 };

 //----------------------------------------------------------------------------
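The decoder structure above exposes the hwaccel hooks as plain function pointers. A hedged sketch of how such hooks are typically wired follows; MyUninit, MyGetBuffer and SetupHwaccelHooks are hypothetical helpers, not part of codec.h, and the fallback simply delegates to FFmpeg's default buffer allocator.

    /* Illustrative only: filling the hwaccel hooks declared in struct _video_decoder_. */
    #include <libavcodec/avcodec.h>
    #include "codec.h"                  /* assumed: provides struct _video_decoder_ */

    static void MyUninit(AVCodecContext * s)
    {
        (void)s;                        // nothing allocated in this sketch
    }

    static int MyGetBuffer(AVCodecContext * s, AVFrame * frame, int flags)
    {
        return avcodec_default_get_buffer2(s, frame, flags);   // fall back to the default allocator
    }

    static void SetupHwaccelHooks(struct _video_decoder_ * dec)
    {
        dec->hwaccel_uninit = MyUninit;
        dec->hwaccel_get_buffer = MyGetBuffer;
    }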
 247	common.h
@@ -1,3 +1,4 @@
+
 /*
  * This file is part of mpv.
  *
@@ -52,7 +53,8 @@
 struct GL;
 typedef struct GL GL;

-enum {
+enum
+{
     MPGL_CAP_ROW_LENGTH = (1 << 4), // GL_[UN]PACK_ROW_LENGTH
     MPGL_CAP_FB = (1 << 5),
     MPGL_CAP_VAO = (1 << 6),
@@ -78,16 +80,15 @@ enum {

 #define MPGL_VER_P(ver) MPGL_VER_GET_MAJOR(ver), MPGL_VER_GET_MINOR(ver)

-void mpgl_load_functions(GL *gl, void *(*getProcAddress)(const GLubyte *),
-    const char *ext2, struct mp_log *log);
-void mpgl_load_functions2(GL *gl, void *(*get_fn)(void *ctx, const char *n),
-    void *fn_ctx, const char *ext2, struct mp_log *log);
+void mpgl_load_functions(GL * gl, void *(*getProcAddress)(const GLubyte *), const char *ext2, struct mp_log *log);
+void mpgl_load_functions2(GL * gl, void *(*get_fn)(void *ctx, const char *n), void *fn_ctx, const char *ext2,
+    struct mp_log *log);

-typedef void (GLAPIENTRY *MP_GLDEBUGPROC)(GLenum, GLenum, GLuint, GLenum,
-    GLsizei, const GLchar *,const void *);
+typedef void (GLAPIENTRY * MP_GLDEBUGPROC) (GLenum, GLenum, GLuint, GLenum, GLsizei, const GLchar *, const void *);

 //function pointers loaded from the OpenGL library
-struct GL {
+struct GL
+{
     int version; // MPGL_VER() mangled (e.g. 210 for 2.1)
     int es; // es version (e.g. 300), 0 for desktop GL
     int glsl_version; // e.g. 130 for GLSL 1.30
@@ -96,149 +97,129 @@ struct GL {
     bool debug_context; // use of e.g. GLX_CONTEXT_DEBUG_BIT_ARB
     GLuint main_fb; // framebuffer to render to (normally 0)

-    void (GLAPIENTRY *Viewport)(GLint, GLint, GLsizei, GLsizei);
-    void (GLAPIENTRY *Clear)(GLbitfield);
-    void (GLAPIENTRY *GenTextures)(GLsizei, GLuint *);
-    void (GLAPIENTRY *DeleteTextures)(GLsizei, const GLuint *);
-    void (GLAPIENTRY *ClearColor)(GLclampf, GLclampf, GLclampf, GLclampf);
-    void (GLAPIENTRY *Enable)(GLenum);
-    void (GLAPIENTRY *Disable)(GLenum);
-    const GLubyte *(GLAPIENTRY * GetString)(GLenum);
-    void (GLAPIENTRY *BlendFuncSeparate)(GLenum, GLenum, GLenum, GLenum);
-    void (GLAPIENTRY *Flush)(void);
-    void (GLAPIENTRY *Finish)(void);
-    void (GLAPIENTRY *PixelStorei)(GLenum, GLint);
-    void (GLAPIENTRY *TexImage1D)(GLenum, GLint, GLint, GLsizei, GLint,
-        GLenum, GLenum, const GLvoid *);
-    void (GLAPIENTRY *TexImage2D)(GLenum, GLint, GLint, GLsizei, GLsizei,
-        GLint, GLenum, GLenum, const GLvoid *);
-    void (GLAPIENTRY *TexSubImage2D)(GLenum, GLint, GLint, GLint,
-        GLsizei, GLsizei, GLenum, GLenum,
-        const GLvoid *);
-    void (GLAPIENTRY *TexParameteri)(GLenum, GLenum, GLint);
-    void (GLAPIENTRY *GetIntegerv)(GLenum, GLint *);
-    void (GLAPIENTRY *ReadPixels)(GLint, GLint, GLsizei, GLsizei, GLenum,
-        GLenum, GLvoid *);
-    void (GLAPIENTRY *ReadBuffer)(GLenum);
-    void (GLAPIENTRY *DrawArrays)(GLenum, GLint, GLsizei);
-    GLenum (GLAPIENTRY *GetError)(void);
-    void (GLAPIENTRY *GetTexLevelParameteriv)(GLenum, GLint, GLenum, GLint *);
-    void (GLAPIENTRY *Scissor)(GLint, GLint, GLsizei, GLsizei);
+    void (GLAPIENTRY * Viewport) (GLint, GLint, GLsizei, GLsizei);
+    void (GLAPIENTRY * Clear) (GLbitfield);
+    void (GLAPIENTRY * GenTextures) (GLsizei, GLuint *);
+    void (GLAPIENTRY * DeleteTextures) (GLsizei, const GLuint *);
+    void (GLAPIENTRY * ClearColor) (GLclampf, GLclampf, GLclampf, GLclampf);
+    void (GLAPIENTRY * Enable) (GLenum);
+    void (GLAPIENTRY * Disable) (GLenum);
+    const GLubyte *(GLAPIENTRY * GetString) (GLenum);
+    void (GLAPIENTRY * BlendFuncSeparate) (GLenum, GLenum, GLenum, GLenum);
+    void (GLAPIENTRY * Flush) (void);
+    void (GLAPIENTRY * Finish) (void);
+    void (GLAPIENTRY * PixelStorei) (GLenum, GLint);
+    void (GLAPIENTRY * TexImage1D) (GLenum, GLint, GLint, GLsizei, GLint, GLenum, GLenum, const GLvoid *);
+    void (GLAPIENTRY * TexImage2D) (GLenum, GLint, GLint, GLsizei, GLsizei, GLint, GLenum, GLenum, const GLvoid *);
+    void (GLAPIENTRY * TexSubImage2D) (GLenum, GLint, GLint, GLint, GLsizei, GLsizei, GLenum, GLenum, const GLvoid *);
+    void (GLAPIENTRY * TexParameteri) (GLenum, GLenum, GLint);
+    void (GLAPIENTRY * GetIntegerv) (GLenum, GLint *);
+    void (GLAPIENTRY * ReadPixels) (GLint, GLint, GLsizei, GLsizei, GLenum, GLenum, GLvoid *);
+    void (GLAPIENTRY * ReadBuffer) (GLenum);
+    void (GLAPIENTRY * DrawArrays) (GLenum, GLint, GLsizei);
+    GLenum(GLAPIENTRY * GetError) (void);
+    void (GLAPIENTRY * GetTexLevelParameteriv) (GLenum, GLint, GLenum, GLint *);
+    void (GLAPIENTRY * Scissor) (GLint, GLint, GLsizei, GLsizei);

-    void (GLAPIENTRY *GenBuffers)(GLsizei, GLuint *);
-    void (GLAPIENTRY *DeleteBuffers)(GLsizei, const GLuint *);
-    void (GLAPIENTRY *BindBuffer)(GLenum, GLuint);
-    void (GLAPIENTRY *BindBufferBase)(GLenum, GLuint, GLuint);
-    GLvoid * (GLAPIENTRY *MapBufferRange)(GLenum, GLintptr, GLsizeiptr,
-        GLbitfield);
-    GLboolean (GLAPIENTRY *UnmapBuffer)(GLenum);
-    void (GLAPIENTRY *BufferData)(GLenum, intptr_t, const GLvoid *, GLenum);
-    void (GLAPIENTRY *ActiveTexture)(GLenum);
-    void (GLAPIENTRY *BindTexture)(GLenum, GLuint);
-    int (GLAPIENTRY *SwapInterval)(int);
-    void (GLAPIENTRY *TexImage3D)(GLenum, GLint, GLenum, GLsizei, GLsizei,
-        GLsizei, GLint, GLenum, GLenum,
+    void (GLAPIENTRY * GenBuffers) (GLsizei, GLuint *);
+    void (GLAPIENTRY * DeleteBuffers) (GLsizei, const GLuint *);
+    void (GLAPIENTRY * BindBuffer) (GLenum, GLuint);
+    void (GLAPIENTRY * BindBufferBase) (GLenum, GLuint, GLuint);
+    GLvoid *(GLAPIENTRY * MapBufferRange) (GLenum, GLintptr, GLsizeiptr, GLbitfield);
+    GLboolean(GLAPIENTRY * UnmapBuffer) (GLenum);
+    void (GLAPIENTRY * BufferData) (GLenum, intptr_t, const GLvoid *, GLenum);
+    void (GLAPIENTRY * ActiveTexture) (GLenum);
+    void (GLAPIENTRY * BindTexture) (GLenum, GLuint);
+    int (GLAPIENTRY * SwapInterval) (int);
+    void (GLAPIENTRY * TexImage3D) (GLenum, GLint, GLenum, GLsizei, GLsizei, GLsizei, GLint, GLenum, GLenum,
         const GLvoid *);

-    void (GLAPIENTRY *GenVertexArrays)(GLsizei, GLuint *);
-    void (GLAPIENTRY *BindVertexArray)(GLuint);
-    GLint (GLAPIENTRY *GetAttribLocation)(GLuint, const GLchar *);
-    void (GLAPIENTRY *EnableVertexAttribArray)(GLuint);
-    void (GLAPIENTRY *DisableVertexAttribArray)(GLuint);
-    void (GLAPIENTRY *VertexAttribPointer)(GLuint, GLint, GLenum, GLboolean,
-        GLsizei, const GLvoid *);
-    void (GLAPIENTRY *DeleteVertexArrays)(GLsizei, const GLuint *);
-    void (GLAPIENTRY *UseProgram)(GLuint);
-    GLint (GLAPIENTRY *GetUniformLocation)(GLuint, const GLchar *);
-    void (GLAPIENTRY *CompileShader)(GLuint);
-    GLuint (GLAPIENTRY *CreateProgram)(void);
-    GLuint (GLAPIENTRY *CreateShader)(GLenum);
-    void (GLAPIENTRY *ShaderSource)(GLuint, GLsizei, const GLchar **,
-        const GLint *);
-    void (GLAPIENTRY *LinkProgram)(GLuint);
-    void (GLAPIENTRY *AttachShader)(GLuint, GLuint);
-    void (GLAPIENTRY *DeleteShader)(GLuint);
-    void (GLAPIENTRY *DeleteProgram)(GLuint);
-    void (GLAPIENTRY *GetShaderInfoLog)(GLuint, GLsizei, GLsizei *, GLchar *);
-    void (GLAPIENTRY *GetShaderiv)(GLuint, GLenum, GLint *);
-    void (GLAPIENTRY *GetProgramInfoLog)(GLuint, GLsizei, GLsizei *, GLchar *);
-    void (GLAPIENTRY *GetProgramiv)(GLenum, GLenum, GLint *);
-    const GLubyte* (GLAPIENTRY *GetStringi)(GLenum, GLuint);
-    void (GLAPIENTRY *BindAttribLocation)(GLuint, GLuint, const GLchar *);
-    void (GLAPIENTRY *BindFramebuffer)(GLenum, GLuint);
-    void (GLAPIENTRY *GenFramebuffers)(GLsizei, GLuint *);
-    void (GLAPIENTRY *DeleteFramebuffers)(GLsizei, const GLuint *);
-    GLenum (GLAPIENTRY *CheckFramebufferStatus)(GLenum);
-    void (GLAPIENTRY *FramebufferTexture2D)(GLenum, GLenum, GLenum, GLuint,
-        GLint);
-    void (GLAPIENTRY *BlitFramebuffer)(GLint, GLint, GLint, GLint, GLint, GLint,
-        GLint, GLint, GLbitfield, GLenum);
-    void (GLAPIENTRY *GetFramebufferAttachmentParameteriv)(GLenum, GLenum,
-        GLenum, GLint *);
+    void (GLAPIENTRY * GenVertexArrays) (GLsizei, GLuint *);
+    void (GLAPIENTRY * BindVertexArray) (GLuint);
+    GLint(GLAPIENTRY * GetAttribLocation) (GLuint, const GLchar *);
+    void (GLAPIENTRY * EnableVertexAttribArray) (GLuint);
+    void (GLAPIENTRY * DisableVertexAttribArray) (GLuint);
+    void (GLAPIENTRY * VertexAttribPointer) (GLuint, GLint, GLenum, GLboolean, GLsizei, const GLvoid *);
+    void (GLAPIENTRY * DeleteVertexArrays) (GLsizei, const GLuint *);
+    void (GLAPIENTRY * UseProgram) (GLuint);
+    GLint(GLAPIENTRY * GetUniformLocation) (GLuint, const GLchar *);
+    void (GLAPIENTRY * CompileShader) (GLuint);
+    GLuint(GLAPIENTRY * CreateProgram) (void);
+    GLuint(GLAPIENTRY * CreateShader) (GLenum);
+    void (GLAPIENTRY * ShaderSource) (GLuint, GLsizei, const GLchar **, const GLint *);
+    void (GLAPIENTRY * LinkProgram) (GLuint);
+    void (GLAPIENTRY * AttachShader) (GLuint, GLuint);
+    void (GLAPIENTRY * DeleteShader) (GLuint);
+    void (GLAPIENTRY * DeleteProgram) (GLuint);
+    void (GLAPIENTRY * GetShaderInfoLog) (GLuint, GLsizei, GLsizei *, GLchar *);
+    void (GLAPIENTRY * GetShaderiv) (GLuint, GLenum, GLint *);
+    void (GLAPIENTRY * GetProgramInfoLog) (GLuint, GLsizei, GLsizei *, GLchar *);
+    void (GLAPIENTRY * GetProgramiv) (GLenum, GLenum, GLint *);
+    const GLubyte *(GLAPIENTRY * GetStringi) (GLenum, GLuint);
+    void (GLAPIENTRY * BindAttribLocation) (GLuint, GLuint, const GLchar *);
+    void (GLAPIENTRY * BindFramebuffer) (GLenum, GLuint);
+    void (GLAPIENTRY * GenFramebuffers) (GLsizei, GLuint *);
+    void (GLAPIENTRY * DeleteFramebuffers) (GLsizei, const GLuint *);
+    GLenum(GLAPIENTRY * CheckFramebufferStatus) (GLenum);
+    void (GLAPIENTRY * FramebufferTexture2D) (GLenum, GLenum, GLenum, GLuint, GLint);
+    void (GLAPIENTRY * BlitFramebuffer) (GLint, GLint, GLint, GLint, GLint, GLint, GLint, GLint, GLbitfield, GLenum);
+    void (GLAPIENTRY * GetFramebufferAttachmentParameteriv) (GLenum, GLenum, GLenum, GLint *);

-    void (GLAPIENTRY *Uniform1f)(GLint, GLfloat);
-    void (GLAPIENTRY *Uniform2f)(GLint, GLfloat, GLfloat);
-    void (GLAPIENTRY *Uniform3f)(GLint, GLfloat, GLfloat, GLfloat);
-    void (GLAPIENTRY *Uniform4f)(GLint, GLfloat, GLfloat, GLfloat, GLfloat);
-    void (GLAPIENTRY *Uniform1i)(GLint, GLint);
-    void (GLAPIENTRY *UniformMatrix2fv)(GLint, GLsizei, GLboolean,
-        const GLfloat *);
-    void (GLAPIENTRY *UniformMatrix3fv)(GLint, GLsizei, GLboolean,
-        const GLfloat *);
+    void (GLAPIENTRY * Uniform1f) (GLint, GLfloat);
+    void (GLAPIENTRY * Uniform2f) (GLint, GLfloat, GLfloat);
+    void (GLAPIENTRY * Uniform3f) (GLint, GLfloat, GLfloat, GLfloat);
+    void (GLAPIENTRY * Uniform4f) (GLint, GLfloat, GLfloat, GLfloat, GLfloat);
+    void (GLAPIENTRY * Uniform1i) (GLint, GLint);
+    void (GLAPIENTRY * UniformMatrix2fv) (GLint, GLsizei, GLboolean, const GLfloat *);
+    void (GLAPIENTRY * UniformMatrix3fv) (GLint, GLsizei, GLboolean, const GLfloat *);

-    void (GLAPIENTRY *InvalidateFramebuffer)(GLenum, GLsizei, const GLenum *);
+    void (GLAPIENTRY * InvalidateFramebuffer) (GLenum, GLsizei, const GLenum *);

-    GLsync (GLAPIENTRY *FenceSync)(GLenum, GLbitfield);
-    GLenum (GLAPIENTRY *ClientWaitSync)(GLsync, GLbitfield, GLuint64);
-    void (GLAPIENTRY *DeleteSync)(GLsync sync);
+    GLsync(GLAPIENTRY * FenceSync) (GLenum, GLbitfield);
+    GLenum(GLAPIENTRY * ClientWaitSync) (GLsync, GLbitfield, GLuint64);
+    void (GLAPIENTRY * DeleteSync) (GLsync sync);

-    void (GLAPIENTRY *GenQueries)(GLsizei, GLuint *);
-    void (GLAPIENTRY *DeleteQueries)(GLsizei, const GLuint *);
-    void (GLAPIENTRY *BeginQuery)(GLenum, GLuint);
-    void (GLAPIENTRY *EndQuery)(GLenum);
-    void (GLAPIENTRY *QueryCounter)(GLuint, GLenum);
-    GLboolean (GLAPIENTRY *IsQuery)(GLuint);
-    void (GLAPIENTRY *GetQueryObjectiv)(GLuint, GLenum, GLint *);
-    void (GLAPIENTRY *GetQueryObjecti64v)(GLuint, GLenum, GLint64 *);
-    void (GLAPIENTRY *GetQueryObjectuiv)(GLuint, GLenum, GLuint *);
-    void (GLAPIENTRY *GetQueryObjectui64v)(GLuint, GLenum, GLuint64 *);
+    void (GLAPIENTRY * GenQueries) (GLsizei, GLuint *);
+    void (GLAPIENTRY * DeleteQueries) (GLsizei, const GLuint *);
+    void (GLAPIENTRY * BeginQuery) (GLenum, GLuint);
+    void (GLAPIENTRY * EndQuery) (GLenum);
+    void (GLAPIENTRY * QueryCounter) (GLuint, GLenum);
+    GLboolean(GLAPIENTRY * IsQuery) (GLuint);
+    void (GLAPIENTRY * GetQueryObjectiv) (GLuint, GLenum, GLint *);
+    void (GLAPIENTRY * GetQueryObjecti64v) (GLuint, GLenum, GLint64 *);
+    void (GLAPIENTRY * GetQueryObjectuiv) (GLuint, GLenum, GLuint *);
+    void (GLAPIENTRY * GetQueryObjectui64v) (GLuint, GLenum, GLuint64 *);

-    void (GLAPIENTRY *VDPAUInitNV)(const GLvoid *, const GLvoid *);
-    void (GLAPIENTRY *VDPAUFiniNV)(void);
-    GLvdpauSurfaceNV (GLAPIENTRY *VDPAURegisterOutputSurfaceNV)
+    void (GLAPIENTRY * VDPAUInitNV) (const GLvoid *, const GLvoid *);
+    void (GLAPIENTRY * VDPAUFiniNV) (void);
+    GLvdpauSurfaceNV(GLAPIENTRY * VDPAURegisterOutputSurfaceNV)
         (GLvoid *, GLenum, GLsizei, const GLuint *);
-    GLvdpauSurfaceNV (GLAPIENTRY *VDPAURegisterVideoSurfaceNV)
+    GLvdpauSurfaceNV(GLAPIENTRY * VDPAURegisterVideoSurfaceNV)
         (GLvoid *, GLenum, GLsizei, const GLuint *);
-    void (GLAPIENTRY *VDPAUUnregisterSurfaceNV)(GLvdpauSurfaceNV);
-    void (GLAPIENTRY *VDPAUSurfaceAccessNV)(GLvdpauSurfaceNV, GLenum);
-    void (GLAPIENTRY *VDPAUMapSurfacesNV)(GLsizei, const GLvdpauSurfaceNV *);
-    void (GLAPIENTRY *VDPAUUnmapSurfacesNV)(GLsizei, const GLvdpauSurfaceNV *);
+    void (GLAPIENTRY * VDPAUUnregisterSurfaceNV) (GLvdpauSurfaceNV);
+    void (GLAPIENTRY * VDPAUSurfaceAccessNV) (GLvdpauSurfaceNV, GLenum);
+    void (GLAPIENTRY * VDPAUMapSurfacesNV) (GLsizei, const GLvdpauSurfaceNV *);
+    void (GLAPIENTRY * VDPAUUnmapSurfacesNV) (GLsizei, const GLvdpauSurfaceNV *);

 #if HAVE_GL_WIN32
     // The HANDLE type might not be present on non-Win32
-    BOOL (GLAPIENTRY *DXSetResourceShareHandleNV)(void *dxObject,
-        HANDLE shareHandle);
-    HANDLE (GLAPIENTRY *DXOpenDeviceNV)(void *dxDevice);
-    BOOL (GLAPIENTRY *DXCloseDeviceNV)(HANDLE hDevice);
-    HANDLE (GLAPIENTRY *DXRegisterObjectNV)(HANDLE hDevice, void *dxObject,
-        GLuint name, GLenum type, GLenum access);
-    BOOL (GLAPIENTRY *DXUnregisterObjectNV)(HANDLE hDevice, HANDLE hObject);
-    BOOL (GLAPIENTRY *DXLockObjectsNV)(HANDLE hDevice, GLint count,
-        HANDLE *hObjects);
-    BOOL (GLAPIENTRY *DXUnlockObjectsNV)(HANDLE hDevice, GLint count,
-        HANDLE *hObjects);
+    BOOL(GLAPIENTRY * DXSetResourceShareHandleNV) (void *dxObject, HANDLE shareHandle);
+    HANDLE(GLAPIENTRY * DXOpenDeviceNV) (void *dxDevice);
+    BOOL(GLAPIENTRY * DXCloseDeviceNV) (HANDLE hDevice);
+    HANDLE(GLAPIENTRY * DXRegisterObjectNV) (HANDLE hDevice, void *dxObject, GLuint name, GLenum type, GLenum access);
+    BOOL(GLAPIENTRY * DXUnregisterObjectNV) (HANDLE hDevice, HANDLE hObject);
+    BOOL(GLAPIENTRY * DXLockObjectsNV) (HANDLE hDevice, GLint count, HANDLE * hObjects);
+    BOOL(GLAPIENTRY * DXUnlockObjectsNV) (HANDLE hDevice, GLint count, HANDLE * hObjects);
 #endif

-    GLint (GLAPIENTRY *GetVideoSync)(GLuint *);
-    GLint (GLAPIENTRY *WaitVideoSync)(GLint, GLint, unsigned int *);
+    GLint(GLAPIENTRY * GetVideoSync) (GLuint *);
+    GLint(GLAPIENTRY * WaitVideoSync) (GLint, GLint, unsigned int *);

-    void (GLAPIENTRY *GetTranslatedShaderSourceANGLE)(GLuint, GLsizei,
-        GLsizei*, GLchar* source);
+    void (GLAPIENTRY * GetTranslatedShaderSourceANGLE) (GLuint, GLsizei, GLsizei *, GLchar * source);

-    void (GLAPIENTRY *DebugMessageCallback)(MP_GLDEBUGPROC callback,
-        const void *userParam);
+    void (GLAPIENTRY * DebugMessageCallback) (MP_GLDEBUGPROC callback, const void *userParam);

-    void *(GLAPIENTRY *MPGetNativeDisplay)(const char *name);
+    void *(GLAPIENTRY * MPGetNativeDisplay) (const char *name);
 };

 #endif /* MPLAYER_GL_COMMON_H */
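The struct GL above is a table of runtime-resolved OpenGL entry points filled in by mpgl_load_functions(). A minimal, hedged sketch of that loading pattern follows; MiniGL, LoadMiniGL and GlFn are illustrative names, and a generic function-pointer type stands in for the fully typed members the real struct uses.

    /* Sketch: resolve GL entry points by name through a caller-supplied callback. */
    #include <stddef.h>

    typedef void (*GlFn)(void);         /* generic function pointer for brevity */

    struct MiniGL {
        GlFn Clear;                     /* e.g. glClear */
        GlFn Finish;                    /* e.g. glFinish */
    };

    static int LoadMiniGL(struct MiniGL *gl, GlFn (*getProcAddress)(const char *name))
    {
        gl->Clear = getProcAddress("glClear");
        gl->Finish = getProcAddress("glFinish");
        return gl->Clear != NULL && gl->Finish != NULL;   /* non-zero when all entry points resolved */
    }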
 2	config.h
@@ -1,3 +1,4 @@
+
 /*
  * This file is part of libplacebo.
  *
@@ -39,5 +40,4 @@
 #define PL_HAVE_SHADERC 0
 #define PL_HAVE_VULKAN 1

-
 #endif // LIBPLACEBO_CONTEXT_H_
@@ -1,3 +1,4 @@
+
 /*
  * Copyright 1993-2013 NVIDIA Corporation. All rights reserved.
  *
@@ -28,76 +29,79 @@ typedef struct
 /**
  * Error codes
  */
-s_CudaErrorStr sCudaDrvErrorString[] =
-{
+s_CudaErrorStr sCudaDrvErrorString[] = {
     /**
      * The API call returned with no errors. In the case of query calls, this
      * can also mean that the operation being queried is complete (see
      * ::cuEventQuery() and ::cuStreamQuery()).
      */
-    { "CUDA_SUCCESS", 0 },
+    {"CUDA_SUCCESS", 0},

     /**
      * This indicates that one or more of the parameters passed to the API call
      * is not within an acceptable range of values.
      */
-    { "CUDA_ERROR_INVALID_VALUE", 1 },
+    {"CUDA_ERROR_INVALID_VALUE", 1},

     /**
      * The API call failed because it was unable to allocate enough memory to
      * perform the requested operation.
      */
-    { "CUDA_ERROR_OUT_OF_MEMORY", 2 },
+    {"CUDA_ERROR_OUT_OF_MEMORY", 2},

     /**
      * This indicates that the CUDA driver has not been initialized with
      * ::cuInit() or that initialization has failed.
      */
-    { "CUDA_ERROR_NOT_INITIALIZED", 3 },
+    {"CUDA_ERROR_NOT_INITIALIZED", 3},

     /**
      * This indicates that the CUDA driver is in the process of shutting down.
      */
-    { "CUDA_ERROR_DEINITIALIZED", 4 },
+    {"CUDA_ERROR_DEINITIALIZED", 4},

     /**
      * This indicates profiling APIs are called while application is running
      * in visual profiler mode.
      */
-    { "CUDA_ERROR_PROFILER_DISABLED", 5 },
+    {"CUDA_ERROR_PROFILER_DISABLED", 5},

     /**
      * This indicates profiling has not been initialized for this context.
      * Call cuProfilerInitialize() to resolve this.
      */
-    { "CUDA_ERROR_PROFILER_NOT_INITIALIZED", 6 },
+    {"CUDA_ERROR_PROFILER_NOT_INITIALIZED", 6},

     /**
      * This indicates profiler has already been started and probably
      * cuProfilerStart() is incorrectly called.
      */
-    { "CUDA_ERROR_PROFILER_ALREADY_STARTED", 7 },
+    {"CUDA_ERROR_PROFILER_ALREADY_STARTED", 7},

     /**
      * This indicates profiler has already been stopped and probably
      * cuProfilerStop() is incorrectly called.
      */
-    { "CUDA_ERROR_PROFILER_ALREADY_STOPPED", 8 },
+    {"CUDA_ERROR_PROFILER_ALREADY_STOPPED", 8},

     /**
      * This indicates that no CUDA-capable devices were detected by the installed
      * CUDA driver.
      */
-    { "CUDA_ERROR_NO_DEVICE (no CUDA-capable devices were detected)", 100 },
+    {"CUDA_ERROR_NO_DEVICE (no CUDA-capable devices were detected)", 100},

     /**
      * This indicates that the device ordinal supplied by the user does not
      * correspond to a valid CUDA device.
      */
-    { "CUDA_ERROR_INVALID_DEVICE (device specified is not a valid CUDA device)", 101 },
+    {"CUDA_ERROR_INVALID_DEVICE (device specified is not a valid CUDA device)", 101},


     /**
      * This indicates that the device kernel image is invalid. This can also
      * indicate an invalid CUDA module.
      */
-    { "CUDA_ERROR_INVALID_IMAGE", 200 },
+    {"CUDA_ERROR_INVALID_IMAGE", 200},

     /**
      * This most frequently indicates that there is no context bound to the
@@ -107,7 +111,7 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * mixes different API versions (i.e. 3010 context with 3020 API calls).
      * See ::cuCtxGetApiVersion() for more details.
      */
-    { "CUDA_ERROR_INVALID_CONTEXT", 201 },
+    {"CUDA_ERROR_INVALID_CONTEXT", 201},

     /**
      * This indicated that the context being supplied as a parameter to the
@@ -116,28 +120,28 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * This error return is deprecated as of CUDA 3.2. It is no longer an
      * error to attempt to push the active context via ::cuCtxPushCurrent().
      */
-    { "CUDA_ERROR_CONTEXT_ALREADY_CURRENT", 202 },
+    {"CUDA_ERROR_CONTEXT_ALREADY_CURRENT", 202},

     /**
      * This indicates that a map or register operation has failed.
      */
-    { "CUDA_ERROR_MAP_FAILED", 205 },
+    {"CUDA_ERROR_MAP_FAILED", 205},

     /**
      * This indicates that an unmap or unregister operation has failed.
      */
-    { "CUDA_ERROR_UNMAP_FAILED", 206 },
+    {"CUDA_ERROR_UNMAP_FAILED", 206},

     /**
      * This indicates that the specified array is currently mapped and thus
      * cannot be destroyed.
      */
-    { "CUDA_ERROR_ARRAY_IS_MAPPED", 207 },
+    {"CUDA_ERROR_ARRAY_IS_MAPPED", 207},

     /**
      * This indicates that the resource is already mapped.
      */
-    { "CUDA_ERROR_ALREADY_MAPPED", 208 },
+    {"CUDA_ERROR_ALREADY_MAPPED", 208},

     /**
      * This indicates that there is no kernel image available that is suitable
@@ -145,115 +149,112 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * options for a particular CUDA source file that do not include the
      * corresponding device configuration.
      */
-    { "CUDA_ERROR_NO_BINARY_FOR_GPU", 209 },
+    {"CUDA_ERROR_NO_BINARY_FOR_GPU", 209},

     /**
      * This indicates that a resource has already been acquired.
      */
-    { "CUDA_ERROR_ALREADY_ACQUIRED", 210 },
+    {"CUDA_ERROR_ALREADY_ACQUIRED", 210},

     /**
      * This indicates that a resource is not mapped.
      */
-    { "CUDA_ERROR_NOT_MAPPED", 211 },
+    {"CUDA_ERROR_NOT_MAPPED", 211},

     /**
      * This indicates that a mapped resource is not available for access as an
      * array.
      */
-    { "CUDA_ERROR_NOT_MAPPED_AS_ARRAY", 212 },
+    {"CUDA_ERROR_NOT_MAPPED_AS_ARRAY", 212},

     /**
      * This indicates that a mapped resource is not available for access as a
      * pointer.
      */
-    { "CUDA_ERROR_NOT_MAPPED_AS_POINTER", 213 },
+    {"CUDA_ERROR_NOT_MAPPED_AS_POINTER", 213},

     /**
      * This indicates that an uncorrectable ECC error was detected during
      * execution.
      */
-    { "CUDA_ERROR_ECC_UNCORRECTABLE", 214 },
+    {"CUDA_ERROR_ECC_UNCORRECTABLE", 214},

     /**
      * This indicates that the ::CUlimit passed to the API call is not
      * supported by the active device.
      */
-    { "CUDA_ERROR_UNSUPPORTED_LIMIT", 215 },
+    {"CUDA_ERROR_UNSUPPORTED_LIMIT", 215},

     /**
      * This indicates that the ::CUcontext passed to the API call can
      * only be bound to a single CPU thread at a time but is already
      * bound to a CPU thread.
      */
-    { "CUDA_ERROR_CONTEXT_ALREADY_IN_USE", 216 },
+    {"CUDA_ERROR_CONTEXT_ALREADY_IN_USE", 216},

     /**
      * This indicates that peer access is not supported across the given
      * devices.
      */
-    { "CUDA_ERROR_PEER_ACCESS_UNSUPPORTED", 217 },
+    {"CUDA_ERROR_PEER_ACCESS_UNSUPPORTED", 217},

     /**
      * This indicates that a PTX JIT compilation failed.
      */
-    { "CUDA_ERROR_INVALID_PTX", 218 },
+    {"CUDA_ERROR_INVALID_PTX", 218},

     /**
      * This indicates an error with OpenGL or DirectX context.
      */
-    { "CUDA_ERROR_INVALID_GRAPHICS_CONTEXT", 219 },
+    {"CUDA_ERROR_INVALID_GRAPHICS_CONTEXT", 219},

     /**
      * This indicates that an uncorrectable NVLink error was detected during the
      * execution.
      */
-    { "CUDA_ERROR_NVLINK_UNCORRECTABLE", 220 },
+    {"CUDA_ERROR_NVLINK_UNCORRECTABLE", 220},

     /**
      * This indicates that the PTX JIT compiler library was not found.
      */
-    { "CUDA_ERROR_JIT_COMPILER_NOT_FOUND", 221 },
+    {"CUDA_ERROR_JIT_COMPILER_NOT_FOUND", 221},

     /**
      * This indicates that the device kernel source is invalid.
      */
-    { "CUDA_ERROR_INVALID_SOURCE", 300 },
+    {"CUDA_ERROR_INVALID_SOURCE", 300},

     /**
      * This indicates that the file specified was not found.
      */
-    { "CUDA_ERROR_FILE_NOT_FOUND", 301 },
+    {"CUDA_ERROR_FILE_NOT_FOUND", 301},

     /**
      * This indicates that a link to a shared object failed to resolve.
      */
-    { "CUDA_ERROR_SHARED_OBJECT_SYMBOL_NOT_FOUND", 302 },
+    {"CUDA_ERROR_SHARED_OBJECT_SYMBOL_NOT_FOUND", 302},

     /**
      * This indicates that initialization of a shared object failed.
      */
-    { "CUDA_ERROR_SHARED_OBJECT_INIT_FAILED", 303 },
+    {"CUDA_ERROR_SHARED_OBJECT_INIT_FAILED", 303},

     /**
      * This indicates that an OS call failed.
      */
-    { "CUDA_ERROR_OPERATING_SYSTEM", 304 },
+    {"CUDA_ERROR_OPERATING_SYSTEM", 304},


     /**
      * This indicates that a resource handle passed to the API call was not
      * valid. Resource handles are opaque types like ::CUstream and ::CUevent.
      */
-    { "CUDA_ERROR_INVALID_HANDLE", 400 },
+    {"CUDA_ERROR_INVALID_HANDLE", 400},


     /**
      * This indicates that a named symbol was not found. Examples of symbols
      * are global/constant variable names, texture names }, and surface names.
      */
-    { "CUDA_ERROR_NOT_FOUND", 500 },
+    {"CUDA_ERROR_NOT_FOUND", 500},


     /**
      * This indicates that asynchronous operations issued previously have not
@@ -261,8 +262,7 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * differently than ::CUDA_SUCCESS (which indicates completion). Calls that
      * may return this value include ::cuEventQuery() and ::cuStreamQuery().
      */
-    { "CUDA_ERROR_NOT_READY", 600 },
-
+    {"CUDA_ERROR_NOT_READY", 600},

     /**
      * While executing a kernel, the device encountered a
@@ -271,7 +271,7 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * will return the same error. To continue using CUDA, the process must be terminated
      * and relaunched.
      */
-    { "CUDA_ERROR_ILLEGAL_ADDRESS", 700 },
+    {"CUDA_ERROR_ILLEGAL_ADDRESS", 700},

     /**
      * This indicates that a launch did not occur because it did not have
@@ -282,7 +282,7 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * when a 32-bit int is expected) is equivalent to passing too many
      * arguments and can also result in this error.
      */
-    { "CUDA_ERROR_LAUNCH_OUT_OF_RESOURCES", 701 },
+    {"CUDA_ERROR_LAUNCH_OUT_OF_RESOURCES", 701},

     /**
      * This indicates that the device kernel took too long to execute. This can
@@ -293,40 +293,40 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * this context are invalid and must be reconstructed if the program is to
      * continue using CUDA.
      */
-    { "CUDA_ERROR_LAUNCH_TIMEOUT", 702 },
+    {"CUDA_ERROR_LAUNCH_TIMEOUT", 702},

     /**
      * This error indicates a kernel launch that uses an incompatible texturing
      * mode.
      */
-    { "CUDA_ERROR_LAUNCH_INCOMPATIBLE_TEXTURING", 703 },
+    {"CUDA_ERROR_LAUNCH_INCOMPATIBLE_TEXTURING", 703},

     /**
      * This error indicates that a call to ::cuCtxEnablePeerAccess() is
      * trying to re-enable peer access to a context which has already
      * had peer access to it enabled.
      */
-    { "CUDA_ERROR_PEER_ACCESS_ALREADY_ENABLED", 704 },
+    {"CUDA_ERROR_PEER_ACCESS_ALREADY_ENABLED", 704},

     /**
      * This error indicates that ::cuCtxDisablePeerAccess() is
      * trying to disable peer access which has not been enabled yet
      * via ::cuCtxEnablePeerAccess().
      */
-    { "CUDA_ERROR_PEER_ACCESS_NOT_ENABLED", 705 },
+    {"CUDA_ERROR_PEER_ACCESS_NOT_ENABLED", 705},

     /**
      * This error indicates that the primary context for the specified device
      * has already been initialized.
      */
-    { "CUDA_ERROR_PRIMARY_CONTEXT_ACTIVE", 708 },
+    {"CUDA_ERROR_PRIMARY_CONTEXT_ACTIVE", 708},

     /**
      * This error indicates that the context current to the calling thread
      * has been destroyed using ::cuCtxDestroy }, or is a primary context which
      * has not yet been initialized.
      */
-    { "CUDA_ERROR_CONTEXT_IS_DESTROYED", 709 },
+    {"CUDA_ERROR_CONTEXT_IS_DESTROYED", 709},

     /**
      * A device-side assert triggered during kernel execution. The context
@@ -334,26 +334,26 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * memory allocations from this context are invalid and must be
      * reconstructed if the program is to continue using CUDA.
      */
-    { "CUDA_ERROR_ASSERT", 710 },
+    {"CUDA_ERROR_ASSERT", 710},

     /**
      * This error indicates that the hardware resources required to enable
      * peer access have been exhausted for one or more of the devices
      * passed to ::cuCtxEnablePeerAccess().
      */
-    { "CUDA_ERROR_TOO_MANY_PEERS", 711 },
+    {"CUDA_ERROR_TOO_MANY_PEERS", 711},

     /**
      * This error indicates that the memory range passed to ::cuMemHostRegister()
      * has already been registered.
      */
-    { "CUDA_ERROR_HOST_MEMORY_ALREADY_REGISTERED", 712 },
+    {"CUDA_ERROR_HOST_MEMORY_ALREADY_REGISTERED", 712},

     /**
      * This error indicates that the pointer passed to ::cuMemHostUnregister()
      * does not correspond to any currently registered memory region.
      */
-    { "CUDA_ERROR_HOST_MEMORY_NOT_REGISTERED", 713 },
+    {"CUDA_ERROR_HOST_MEMORY_NOT_REGISTERED", 713},

     /**
      * While executing a kernel, the device encountered a stack error.
@@ -362,7 +362,7 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * will return the same error. To continue using CUDA, the process must be terminated
      * and relaunched.
      */
-    { "CUDA_ERROR_HARDWARE_STACK_ERROR", 714 },
+    {"CUDA_ERROR_HARDWARE_STACK_ERROR", 714},

     /**
      * While executing a kernel, the device encountered an illegal instruction.
@@ -370,7 +370,7 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * will return the same error. To continue using CUDA, the process must be terminated
      * and relaunched.
      */
-    { "CUDA_ERROR_ILLEGAL_INSTRUCTION", 715 },
+    {"CUDA_ERROR_ILLEGAL_INSTRUCTION", 715},

     /**
      * While executing a kernel, the device encountered a load or store instruction
@@ -379,7 +379,7 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * will return the same error. To continue using CUDA, the process must be terminated
      * and relaunched.
      */
-    { "CUDA_ERROR_MISALIGNED_ADDRESS", 716 },
+    {"CUDA_ERROR_MISALIGNED_ADDRESS", 716},

     /**
      * While executing a kernel, the device encountered an instruction
@@ -390,7 +390,7 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * will return the same error. To continue using CUDA, the process must be terminated
      * and relaunched.
      */
-    { "CUDA_ERROR_INVALID_ADDRESS_SPACE", 717 },
+    {"CUDA_ERROR_INVALID_ADDRESS_SPACE", 717},

     /**
      * While executing a kernel, the device program counter wrapped its address space.
@@ -398,7 +398,7 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * will return the same error. To continue using CUDA, the process must be terminated
      * and relaunched.
      */
-    { "CUDA_ERROR_INVALID_PC", 718 },
+    {"CUDA_ERROR_INVALID_PC", 718},

     /**
      * An exception occurred on the device while executing a kernel. Common
@@ -408,7 +408,7 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * memory allocations from this context are invalid and must be
      * reconstructed if the program is to continue using CUDA.
      */
-    { "CUDA_ERROR_LAUNCH_FAILED", 719 },
+    {"CUDA_ERROR_LAUNCH_FAILED", 719},

     /**
      * This error indicates that the number of blocks launched per grid for a kernel that was
@@ -417,26 +417,24 @@ s_CudaErrorStr sCudaDrvErrorString[] =
      * or ::cuOccupancyMaxActiveBlocksPerMultiprocessorWithFlags times the number of multiprocessors
      * as specified by the device attribute ::CU_DEVICE_ATTRIBUTE_MULTIPROCESSOR_COUNT.
      */
-    { "CUDA_ERROR_COOPERATIVE_LAUNCH_TOO_LARGE", 720 },
-
+    {"CUDA_ERROR_COOPERATIVE_LAUNCH_TOO_LARGE", 720},

     /**
      * This error indicates that the attempted operation is not permitted.
      */
-    { "CUDA_ERROR_NOT_PERMITTED", 800 },
+    {"CUDA_ERROR_NOT_PERMITTED", 800},

     /**
      * This error indicates that the attempted operation is not supported
      * on the current system or device.
      */
-    { "CUDA_ERROR_NOT_SUPPORTED", 801 },
-
+    {"CUDA_ERROR_NOT_SUPPORTED", 801},

     /**
      * This indicates that an unknown internal error has occurred.
      */
-    { "CUDA_ERROR_UNKNOWN", 999 },
-    { NULL, -1 }
+    {"CUDA_ERROR_UNKNOWN", 999},
+    {NULL, -1}
 };

 // This is just a linear search through the array, since the error_id's are not
@@ -445,9 +443,7 @@ static inline const char *getCudaDrvErrorString(CUresult error_id)
 {
     int index = 0;

-    while (sCudaDrvErrorString[index].error_id != error_id &&
-        (int)sCudaDrvErrorString[index].error_id != -1)
-    {
+    while (sCudaDrvErrorString[index].error_id != error_id && (int)sCudaDrvErrorString[index].error_id != -1) {
         index++;
     }

@@ -459,5 +455,4 @@ static inline const char *getCudaDrvErrorString(CUresult error_id)

 #endif // __cuda_cuda_h__

-
 #endif
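As the comment in the file notes, getCudaDrvErrorString() is a simple linear search over the table, terminated by the {NULL, -1} sentinel. A short, hedged usage sketch follows; CheckCuda is a hypothetical helper, and cuInit() is only an example of a CUDA driver-API call whose CUresult one would report this way.

    /* Sketch: report a failed driver-API call using the lookup above. */
    #include <stdio.h>
    #include <cuda.h>

    static int CheckCuda(CUresult err, const char *what)
    {
        if (err != CUDA_SUCCESS) {
            fprintf(stderr, "%s failed: %s\n", what, getCudaDrvErrorString(err));
            return 0;                   // caller decides how to recover
        }
        return 1;
    }

    /* e.g.:  CheckCuda(cuInit(0), "cuInit"); */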
 10	misc.h
@@ -47,7 +47,7 @@ extern int SysLogLevel; ///< how much information wanted
 //////////////////////////////////////////////////////////////////////////////

 static inline void Syslog(const int, const char *format, ...)
-    __attribute__ ((format(printf, 2, 3)));
+    __attribute__((format(printf, 2, 3)));

 //////////////////////////////////////////////////////////////////////////////
 //	Inlines
@@ -125,9 +125,8 @@ static inline const char *Timestamp2String(int64_t ts)
         return "--:--:--.---";
     }
     idx = (idx + 1) % 3;
-    snprintf(buf[idx], sizeof(buf[idx]), "%2d:%02d:%02d.%03d",
-        (int)(ts / (90 * 3600000)), (int)((ts / (90 * 60000)) % 60),
-        (int)((ts / (90 * 1000)) % 60), (int)((ts / 90) % 1000));
+    snprintf(buf[idx], sizeof(buf[idx]), "%2d:%02d:%02d.%03d", (int)(ts / (90 * 3600000)),
+        (int)((ts / (90 * 60000)) % 60), (int)((ts / (90 * 1000)) % 60), (int)((ts / 90) % 1000));

     return buf[idx];
 }
@@ -153,6 +152,7 @@ static inline uint32_t GetMsTicks(void)
     return (tval.tv_sec * 1000) + (tval.tv_usec / 1000);
 #endif
 }
+
 static inline uint64_t GetusTicks(void)
 {
@@ -160,7 +160,7 @@ static inline uint64_t GetusTicks(void)
     struct timespec tspec;

     clock_gettime(CLOCK_MONOTONIC, &tspec);
-    return (uint64_t) (tspec.tv_sec * 1000000) + (tspec.tv_nsec) ;
+    return (uint64_t) (tspec.tv_sec * 1000000) + (tspec.tv_nsec);
 #else
     struct timeval tval;

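The reflowed snprintf in Timestamp2String() converts a 90 kHz PTS into h:mm:ss.mmm purely by integer division. A small standalone worked example of that arithmetic follows; it is illustrative only and not part of misc.h. A PTS of 324000000 ticks is 324000000 / 90 = 3600000 ms, i.e. exactly one hour.

    /* Worked example: format a 90 kHz PTS the same way Timestamp2String() does. */
    #include <stdio.h>
    #include <stdint.h>

    int main(void)
    {
        int64_t ts = 324000000;         // 90 kHz ticks = exactly 1:00:00.000

        printf("%2d:%02d:%02d.%03d\n", (int)(ts / (90 * 3600000)), (int)((ts / (90 * 60000)) % 60),
            (int)((ts / (90 * 1000)) % 60), (int)((ts / 90) % 1000));
        return 0;
    }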
@@ -616,6 +616,7 @@ void cOglFb::BindWrite(void) {
 }

 void cOglFb::Unbind(void) {
+    glFinish();
     glBindFramebuffer(GL_FRAMEBUFFER, 0);
     glBindTexture(GL_TEXTURE_2D, 0);
 }
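The added glFinish() makes the GPU finish all queued rendering into the framebuffer object before it is unbound, so the attached texture is safe to use afterwards. A hedged sketch of the same pattern in plain GL calls (not the plugin's cOglFb class) is shown below; a lighter-weight alternative would be a fence sync instead of a full glFinish().

    /* Sketch: unbind an FBO only after its pending rendering has completed. */
    static void UnbindRenderTarget(void)
    {
        glFinish();                             // block until all queued GL commands are done
        glBindFramebuffer(GL_FRAMEBUFFER, 0);   // back to the default framebuffer
        glBindTexture(GL_TEXTURE_2D, 0);
    }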
|
599
openglosd.h
599
openglosd.h
@ -20,22 +20,18 @@
|
|||||||
#define FT_ERRORDEF( e, v, s ) { e, s },
|
#define FT_ERRORDEF( e, v, s ) { e, s },
|
||||||
#define FT_ERROR_START_LIST {
|
#define FT_ERROR_START_LIST {
|
||||||
#define FT_ERROR_END_LIST { 0, 0 } };
|
#define FT_ERROR_END_LIST { 0, 0 } };
|
||||||
const struct {
|
const struct
|
||||||
|
{
|
||||||
int code;
|
int code;
|
||||||
const char* message;
|
const char *message;
|
||||||
} FT_Errors[] =
|
} FT_Errors[] =
|
||||||
#include FT_ERRORS_H
|
#include FT_ERRORS_H
|
||||||
|
|
||||||
|
|
||||||
#include <memory>
|
#include <memory>
|
||||||
#include <queue>
|
#include <queue>
|
||||||
|
|
||||||
#include <vdr/plugin.h>
|
#include <vdr/plugin.h>
|
||||||
#include <vdr/osd.h>
|
#include <vdr/osd.h>
|
||||||
#include <vdr/thread.h>
|
#include <vdr/thread.h>
|
||||||
|
|
||||||
#include "softhddev.h"
|
#include "softhddev.h"
|
||||||
|
|
||||||
extern "C"
|
extern "C"
|
||||||
{
|
{
|
||||||
#include <stdint.h>
|
#include <stdint.h>
|
||||||
@ -49,7 +45,8 @@ extern "C"
|
|||||||
|
|
||||||
extern "C" pthread_mutex_t OSDMutex;
|
extern "C" pthread_mutex_t OSDMutex;
|
||||||
|
|
||||||
struct sOglImage {
|
struct sOglImage
|
||||||
|
{
|
||||||
GLuint texture;
|
GLuint texture;
|
||||||
GLint width;
|
GLint width;
|
||||||
GLint height;
|
GLint height;
|
||||||
@ -60,43 +57,51 @@ struct sOglImage {
|
|||||||
* Helpers
|
* Helpers
|
||||||
****************************************************************************************/
|
****************************************************************************************/
|
||||||
|
|
||||||
void ConvertColor(const GLint &colARGB, glm::vec4 &col);
|
void ConvertColor(const GLint & colARGB, glm::vec4 & col);
|
||||||
|
|
||||||
/****************************************************************************************
|
/****************************************************************************************
|
||||||
* cShader
|
* cShader
|
||||||
****************************************************************************************/
|
****************************************************************************************/
|
||||||
enum eShaderType {
|
enum eShaderType
|
||||||
|
{
|
||||||
stRect,
|
stRect,
|
||||||
stTexture,
|
stTexture,
|
||||||
stText,
|
stText,
|
||||||
stCount
|
stCount
|
||||||
};
|
};
|
||||||
|
|
||||||
class cShader {
|
class cShader
|
||||||
private:
|
{
|
||||||
|
private:
|
||||||
eShaderType type;
|
eShaderType type;
|
||||||
GLuint id;
|
GLuint id;
|
||||||
bool Compile(const char *vertexCode, const char *fragmentCode);
|
bool Compile(const char *vertexCode, const char *fragmentCode);
|
||||||
bool CheckCompileErrors(GLuint object, bool program = false);
|
bool CheckCompileErrors(GLuint object, bool program = false);
|
||||||
public:
|
public:
|
||||||
cShader(void) {};
|
cShader(void)
|
||||||
virtual ~cShader(void) {};
|
{
|
||||||
|
};
|
||||||
|
virtual ~ cShader(void)
|
||||||
|
{
|
||||||
|
};
|
||||||
bool Load(eShaderType type);
|
bool Load(eShaderType type);
|
||||||
void Use(void);
|
void Use(void);
|
||||||
void SetFloat (const GLchar *name, GLfloat value);
|
void SetFloat(const GLchar * name, GLfloat value);
|
||||||
void SetInteger (const GLchar *name, GLint value);
|
void SetInteger(const GLchar * name, GLint value);
|
||||||
void SetVector2f (const GLchar *name, GLfloat x, GLfloat y);
|
void SetVector2f(const GLchar * name, GLfloat x, GLfloat y);
|
||||||
void SetVector3f (const GLchar *name, GLfloat x, GLfloat y, GLfloat z);
|
void SetVector3f(const GLchar * name, GLfloat x, GLfloat y, GLfloat z);
|
||||||
void SetVector4f (const GLchar *name, GLfloat x, GLfloat y, GLfloat z, GLfloat w);
|
void SetVector4f(const GLchar * name, GLfloat x, GLfloat y, GLfloat z, GLfloat w);
|
||||||
void SetMatrix4 (const GLchar *name, const glm::mat4 &matrix);
|
void SetMatrix4(const GLchar * name, const glm::mat4 & matrix);
|
||||||
};
|
};
|
||||||
|
|
||||||
/****************************************************************************************
* cOglGlyph
****************************************************************************************/

class cOglGlyph : public cListObject {
private:
    struct tKerning {
    public:
        tKerning(uint prevSym, GLfloat kerning = 0.0f) {
            this->prevSym = prevSym;
@@ -111,18 +116,38 @@ private:
    int width;
    int height;
    int advanceX;
    cVector<tKerning> kerningCache;
    GLuint texture;
    void LoadTexture(FT_BitmapGlyph ftGlyph);
public:
    cOglGlyph(uint charCode, FT_BitmapGlyph ftGlyph);
    virtual ~cOglGlyph();
    uint CharCode(void) { return charCode; }
    int AdvanceX(void) { return advanceX; }
    int BearingLeft(void) const { return bearingLeft; }
    int BearingTop(void) const { return bearingTop; }
    int Width(void) const { return width; }
    int Height(void) const { return height; }
    int GetKerningCache(uint prevSym);
    void SetKerningCache(uint prevSym, int kerning);
    void BindTexture(void);
@@ -131,8 +156,9 @@ public:

/****************************************************************************************
* cOglFont
****************************************************************************************/

class cOglFont : public cListObject {
private:
    static bool initiated;
    cString name;
    int size;
@@ -140,41 +166,57 @@ private:
    int bottom;
    static FT_Library ftLib;
    FT_Face face;
    static cList<cOglFont> *fonts;
    mutable cList<cOglGlyph> glyphCache;
    cOglFont(const char *fontName, int charHeight);
    static void Init(void);
public:
    virtual ~cOglFont(void);
    static cOglFont *Get(const char *name, int charHeight);
    static void Cleanup(void);
    const char *Name(void) { return *name; };
    int Size(void) { return size; };
    int Bottom(void) { return bottom; };
    int Height(void) { return height; };
    cOglGlyph *Glyph(uint charCode) const;
    int Kerning(cOglGlyph *glyph, uint prevSym) const;
};

/****************************************************************************************
* cOglFb
* Framebuffer Object - OpenGL part of a Pixmap
****************************************************************************************/

class cOglFb {
protected:
    bool initiated;
    // GLuint fb;
    // GLuint texture;
    GLint width, height;
    GLint viewPortWidth, viewPortHeight;
    bool scrollable;
public:
    GLuint fb;
    GLuint texture;

    cOglFb(GLint width, GLint height, GLint viewPortWidth, GLint viewPortHeight);
    virtual ~cOglFb(void);
    bool Initiated(void) { return initiated; }
    virtual bool Init(void);
    void Bind(void);
    void BindRead(void);
@@ -182,27 +224,43 @@ public:
    virtual void Unbind(void);
    bool BindTexture(void);
    void Blit(GLint destX1, GLint destY1, GLint destX2, GLint destY2);
    GLint Width(void) { return width; };
    GLint Height(void) { return height; };
    bool Scrollable(void) { return scrollable; };
    GLint ViewportWidth(void) { return viewPortWidth; };
    GLint ViewportHeight(void) { return viewPortHeight; };
};

/****************************************************************************************
* cOglOutputFb
* Output Framebuffer Object - holds Vdpau Output Surface which is our "output framebuffer"
****************************************************************************************/

class cOglOutputFb : public cOglFb {
protected:
    bool initiated;
private:
    GLvdpauSurfaceNV surface;
public:
    GLuint fb;
    GLuint texture;
    cOglOutputFb(GLint width, GLint height);
    virtual ~cOglOutputFb(void);
    virtual bool Init(void);
    virtual void BindWrite(void);
    virtual void Unbind(void);
@@ -212,7 +270,8 @@ public:

/****************************************************************************************
* cOglVb
* Vertex Buffer - OpenGl Vertices for the different drawing commands
****************************************************************************************/

enum eVertexBufferType {
    vbRect,
    vbEllipse,
    vbSlope,
@@ -221,8 +280,9 @@ enum eVertexBufferType {
    vbCount
};

class cOglVb {
private:
    eVertexBufferType type;
    eShaderType shader;
    GLuint vao;
@@ -231,9 +291,9 @@ private:
    int sizeVertex2;
    int numVertices;
    GLuint drawMode;
public:
    cOglVb(int type);
    virtual ~cOglVb(void);
    bool Init(void);
    void Bind(void);
    void Unbind(void);
@@ -243,99 +303,149 @@ public:
    void SetShaderColor(GLint color);
    void SetShaderAlpha(GLint alpha);
    void SetShaderProjectionMatrix(GLint width, GLint height);
    void SetVertexData(GLfloat *vertices, int count = 0);
    void DrawArrays(int count = 0);
};

/****************************************************************************************
* cOpenGLCmd
****************************************************************************************/

class cOglCmd {
protected:
    cOglFb *fb;
public:
    cOglCmd(cOglFb *fb) { this->fb = fb; };
    virtual ~cOglCmd(void) {};
    virtual const char *Description(void) = 0;
    virtual bool Execute(void) = 0;
};

class cOglCmdInitOutputFb : public cOglCmd {
private:
    cOglOutputFb *oFb;
public:
    cOglCmdInitOutputFb(cOglOutputFb *oFb);
    virtual ~cOglCmdInitOutputFb(void) {};
    virtual const char *Description(void) { return "InitOutputFramebuffer"; }
    virtual bool Execute(void);
};

class cOglCmdInitFb : public cOglCmd {
private:
    cCondWait *wait;
public:
    cOglCmdInitFb(cOglFb *fb, cCondWait *wait = NULL);
    virtual ~cOglCmdInitFb(void) {};
    virtual const char *Description(void) { return "InitFramebuffer"; }
    virtual bool Execute(void);
};

class cOglCmdDeleteFb : public cOglCmd {
public:
    cOglCmdDeleteFb(cOglFb *fb);
    virtual ~cOglCmdDeleteFb(void) {};
    virtual const char *Description(void) { return "DeleteFramebuffer"; }
    virtual bool Execute(void);
};

class cOglCmdRenderFbToBufferFb : public cOglCmd {
private:
    cOglFb *buffer;
    GLfloat x, y;
    GLfloat drawPortX, drawPortY;
    GLint transparency;
public:
    cOglCmdRenderFbToBufferFb(cOglFb *fb, cOglFb *buffer, GLint x, GLint y, GLint transparency, GLint drawPortX, GLint drawPortY);
    virtual ~cOglCmdRenderFbToBufferFb(void) {};
    virtual const char *Description(void) { return "Render Framebuffer to Buffer"; }
    virtual bool Execute(void);
};

class cOglCmdCopyBufferToOutputFb : public cOglCmd {
private:
    cOglOutputFb *oFb;
    GLint x, y;
public:
    cOglCmdCopyBufferToOutputFb(cOglFb *fb, cOglOutputFb *oFb, GLint x, GLint y);
    virtual ~cOglCmdCopyBufferToOutputFb(void) {};
    virtual const char *Description(void) { return "Copy buffer to OutputFramebuffer"; }
    virtual bool Execute(void);
};

class cOglCmdFill : public cOglCmd {
private:
    GLint color;
public:
    cOglCmdFill(cOglFb *fb, GLint color);
    virtual ~cOglCmdFill(void) {};
    virtual const char *Description(void) { return "Fill"; }
    virtual bool Execute(void);
};

class cOglCmdDrawRectangle : public cOglCmd {
private:
    GLint x, y;
    GLint width, height;
    GLint color;
public:
    cOglCmdDrawRectangle(cOglFb *fb, GLint x, GLint y, GLint width, GLint height, GLint color);
    virtual ~cOglCmdDrawRectangle(void) {};
    virtual const char *Description(void) { return "DrawRectangle"; }
    virtual bool Execute(void);
};

class cOglCmdDrawEllipse : public cOglCmd {
private:
    GLint x, y;
    GLint width, height;
    GLint color;
@@ -343,84 +453,121 @@ private:
    GLfloat *CreateVerticesFull(int &numVertices);
    GLfloat *CreateVerticesQuadrant(int &numVertices);
    GLfloat *CreateVerticesHalf(int &numVertices);
public:
    cOglCmdDrawEllipse(cOglFb *fb, GLint x, GLint y, GLint width, GLint height, GLint color, GLint quadrants);
    virtual ~cOglCmdDrawEllipse(void) {};
    virtual const char *Description(void) { return "DrawEllipse"; }
    virtual bool Execute(void);
};

class cOglCmdDrawSlope : public cOglCmd {
private:
    GLint x, y;
    GLint width, height;
    GLint color;
    GLint type;
public:
    cOglCmdDrawSlope(cOglFb *fb, GLint x, GLint y, GLint width, GLint height, GLint color, GLint type);
    virtual ~cOglCmdDrawSlope(void) {};
    virtual const char *Description(void) { return "DrawSlope"; }
    virtual bool Execute(void);
};

class cOglCmdDrawText : public cOglCmd {
private:
    GLint x, y;
    GLint limitX;
    GLint colorText;
    cString fontName;
    int fontSize;
    unsigned int *symbols;
public:
    cOglCmdDrawText(cOglFb *fb, GLint x, GLint y, unsigned int *symbols, GLint limitX, const char *name, int fontSize, tColor colorText);
    virtual ~cOglCmdDrawText(void);
    virtual const char *Description(void) { return "DrawText"; }
    virtual bool Execute(void);
};

class cOglCmdDrawImage : public cOglCmd {
private:
    tColor *argb;
    GLint x, y, width, height;
    bool overlay;
    GLfloat scaleX, scaleY;
public:
    cOglCmdDrawImage(cOglFb *fb, tColor *argb, GLint width, GLint height, GLint x, GLint y, bool overlay = true, double scaleX = 1.0f, double scaleY = 1.0f);
    virtual ~cOglCmdDrawImage(void);
    virtual const char *Description(void) { return "Draw Image"; }
    virtual bool Execute(void);
};

class cOglCmdDrawTexture : public cOglCmd {
private:
    sOglImage *imageRef;
    GLint x, y;
public:
    cOglCmdDrawTexture(cOglFb *fb, sOglImage *imageRef, GLint x, GLint y);
    virtual ~cOglCmdDrawTexture(void) {};
    virtual const char *Description(void) { return "Draw Texture"; }
    virtual bool Execute(void);
};

class cOglCmdStoreImage : public cOglCmd {
private:
    sOglImage *imageRef;
    tColor *data;
public:
    cOglCmdStoreImage(sOglImage *imageRef, tColor *argb);
    virtual ~cOglCmdStoreImage(void);
    virtual const char *Description(void) { return "Store Image"; }
    virtual bool Execute(void);
};

class cOglCmdDropImage : public cOglCmd {
private:
    sOglImage *imageRef;
    cCondWait *wait;
public:
    cOglCmdDropImage(sOglImage *imageRef, cCondWait *wait);
    virtual ~cOglCmdDropImage(void) {};
    virtual const char *Description(void) { return "Drop Image"; }
    virtual bool Execute(void);
};

@@ -430,12 +577,13 @@ public:
#define OGL_MAX_OSDIMAGES 256
#define OGL_CMDQUEUE_SIZE 100

class cOglThread : public cThread {
private:
    cCondWait *startWait;
    cCondWait *wait;
    bool stalled;
    std::queue<cOglCmd*> commands;
    GLint maxTextureSize;
    sOglImage imageCache[OGL_MAX_OSDIMAGES];
    long memCached;
@@ -449,73 +597,96 @@ private:
    void Cleanup(void);
    int GetFreeSlot(void);
    void ClearSlot(int slot);
protected:
    virtual void Action(void);
public:
    cOglThread(cCondWait *startWait, int maxCacheSize);
    virtual ~cOglThread();
    void Stop(void);
    void DoCmd(cOglCmd *cmd);
    int StoreImage(const cImage &image);
    void DropImageData(int imageHandle);
    sOglImage *GetImageRef(int slot);
    int MaxTextureSize(void) { return maxTextureSize; };
};

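The OSD drawing API above is a plain command queue: callers allocate a cOglCmd subclass and hand it to the render thread, which executes it on the thread that owns the GL context. A minimal sketch of that flow, assuming the queue takes ownership of each command and frees it after Execute() (the consuming side of DoCmd() is not shown in these hunks), with illustrative sizes and ARGB values:

    // hypothetical helper, not part of the plugin
    void DrawDemo(std::shared_ptr<cOglThread> oglThread)
    {
        cOglFb *fb = new cOglFb(720, 576, 720, 576);        // OSD-sized framebuffer object
        oglThread->DoCmd(new cOglCmdInitFb(fb));            // FBO gets created on the GL thread
        oglThread->DoCmd(new cOglCmdFill(fb, 0xC0000000));  // semi-transparent black background
        oglThread->DoCmd(new cOglCmdDrawRectangle(fb, 10, 10, 100, 40, 0xFFFFFFFF));
    }
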
/****************************************************************************************
* cOglPixmap
****************************************************************************************/

class cOglPixmap : public cPixmap {
private:
    cOglFb *fb;
    std::shared_ptr<cOglThread> oglThread;
    bool dirty;
public:
    cOglPixmap(std::shared_ptr<cOglThread> oglThread, int Layer, const cRect &ViewPort, const cRect &DrawPort = cRect::Null);
    virtual ~cOglPixmap(void);
    cOglFb *Fb(void) { return fb; };
    int X(void) { return ViewPort().X(); };
    int Y(void) { return ViewPort().Y(); };
    virtual bool IsDirty(void) { return dirty; }
    virtual void SetDirty(bool dirty = true) { this->dirty = dirty; }
    virtual void SetAlpha(int Alpha);
    virtual void SetTile(bool Tile);
    virtual void SetViewPort(const cRect &Rect);
    virtual void SetDrawPortPoint(const cPoint &Point, bool Dirty = true);
    virtual void Clear(void);
    virtual void Fill(tColor Color);
    virtual void DrawImage(const cPoint &Point, const cImage &Image);
    virtual void DrawImage(const cPoint &Point, int ImageHandle);
    virtual void DrawPixel(const cPoint &Point, tColor Color);
    virtual void DrawBitmap(const cPoint &Point, const cBitmap &Bitmap, tColor ColorFg = 0, tColor ColorBg = 0, bool Overlay = false);
    virtual void DrawText(const cPoint &Point, const char *s, tColor ColorFg, tColor ColorBg, const cFont *Font, int Width = 0, int Height = 0, int Alignment = taDefault);
    virtual void DrawRectangle(const cRect &Rect, tColor Color);
    virtual void DrawEllipse(const cRect &Rect, tColor Color, int Quadrants = 0);
    virtual void DrawSlope(const cRect &Rect, tColor Color, int Type);
    virtual void Render(const cPixmap *Pixmap, const cRect &Source, const cPoint &Dest);
    virtual void Copy(const cPixmap *Pixmap, const cRect &Source, const cPoint &Dest);
    virtual void Scroll(const cPoint &Dest, const cRect &Source = cRect::Null);
    virtual void Pan(const cPoint &Dest, const cRect &Source = cRect::Null);
};

/******************************************************************************
* cOglOsd
******************************************************************************/

class cOglOsd : public cOsd {
private:
    cOglFb *bFb;
    std::shared_ptr<cOglThread> oglThread;
    cVector<cOglPixmap *> oglPixmaps;
    bool isSubtitleOsd;
protected:
public:
    cOglOsd(int Left, int Top, uint Level, std::shared_ptr<cOglThread> oglThread);
    virtual ~cOglOsd();
    virtual eOsdError SetAreas(const tArea *Areas, int NumAreas);
    virtual cPixmap *CreatePixmap(int Layer, const cRect &ViewPort, const cRect &DrawPort = cRect::Null);
    virtual void DestroyPixmap(cPixmap *Pixmap);
    virtual void Flush(void);
    virtual void DrawScaledBitmap(int x, int y, const cBitmap &Bitmap, double FactorX, double FactorY, bool AntiAlias = false);
    static cOglOutputFb *oFb;
};

37 po/de_DE.po

@@ -7,7 +7,7 @@ msgid ""
 msgstr ""
 "Project-Id-Version: VDR \n"
 "Report-Msgid-Bugs-To: <see README>\n"
-"POT-Creation-Date: 2019-10-04 14:23+0200\n"
+"POT-Creation-Date: 2019-10-26 18:41+0200\n"
 "PO-Revision-Date: blabla\n"
 "Last-Translator: blabla\n"
 "Language-Team: blabla\n"
@@ -792,7 +792,13 @@ msgstr ""
 msgid "[softhddev] ready%s\n"
 msgstr ""

-msgid "video/egl: GlxSetupWindow can't make egl context current\n"
+msgid "video: can't lock thread\n"
+msgstr ""
+
+msgid "video: can't unlock thread\n"
+msgstr ""
+
+msgid "video/egl: GlxSetupWindow can't make egl/glx context current\n"
 msgstr ""

 msgid "video/glx: no v-sync\n"
@@ -891,21 +897,6 @@ msgstr ""
 msgid "Failed rendering frame!\n"
 msgstr ""

-#, c-format
-msgid "video/vdpau: can't get video surface parameters: %s\n"
-msgstr ""
-
-msgid "video/vdpau: out of memory\n"
-msgstr ""
-
-#, c-format
-msgid "video/vdpau: unsupported chroma type %d\n"
-msgstr ""
-
-#, c-format
-msgid "video/vdpau: can't get video surface bits: %s\n"
-msgstr ""
-
 #, c-format
 msgid "video/vdpau: output buffer full, dropping frame (%d/%d)\n"
 msgstr ""
@@ -924,9 +915,6 @@ msgstr ""
 msgid "video: decoder buffer empty, duping frame (%d/%d) %d v-buf\n"
 msgstr ""

-msgid "Failed creating vulkan swapchain!"
-msgstr ""
-
 msgid "video: fatal i/o error\n"
 msgstr ""

@@ -934,12 +922,6 @@ msgstr ""
 msgid "video/event: No symbol for %d\n"
 msgstr ""

-msgid "video: can't lock thread\n"
-msgstr ""
-
-msgid "video: can't unlock thread\n"
-msgstr ""
-
 msgid "Cant get memory for PLACEBO struct"
 msgstr ""

@@ -952,6 +934,9 @@ msgstr ""
 msgid "Failed to create Vulkan Device"
 msgstr ""

+msgid "Failed creating vulkan swapchain!"
+msgstr ""
+
 msgid "Failed initializing libplacebo renderer\n"
 msgstr ""

260 shaders.h

@@ -1,7 +1,7 @@

// shader
#ifdef CUVID
char vertex_osd[] = { "\
#version 330\n\
in vec2 vertex_position;\n\
in vec2 vertex_texcoord0;\n\
@@ -9,9 +9,9 @@ out vec2 texcoord0;\n\
void main() {\n\
gl_Position = vec4(vertex_position, 1.0, 1.0);\n\
texcoord0 = vertex_texcoord0;\n\
}\n" };

char fragment_osd[] = { "\
#version 330\n\
#define texture1D texture\n\
precision mediump float; \
@@ -22,9 +22,9 @@ void main() {\n\
vec4 color; \n\
color = vec4(texture(texture0, texcoord0));\n\
out_color = color;\n\
}\n" };

char vertex[] = { "\
#version 310 es\n\
in vec2 vertex_position;\n\
in vec2 vertex_texcoord0;\n\
@@ -35,9 +35,9 @@ void main() {\n\
gl_Position = vec4(vertex_position, 1.0, 1.0);\n\
texcoord0 = vertex_texcoord0;\n\
texcoord1 = vertex_texcoord1;\n\
}\n" };

char fragment[] = { "\
#version 310 es\n\
#define texture1D texture\n\
#define texture3D texture\n\
@@ -58,9 +58,9 @@ color.rgb = mat3(colormatrix) * color.rgb + colormatrix_c;\n\
color.a = 1.0;\n\
// color mapping\n\
out_color = color;\n\
}\n" };

char fragment_bt2100[] = { "\
#version 310 es\n \
#define texture1D texture\n\
#define texture3D texture\n\
@@ -88,11 +88,10 @@ color.rgb = cms_matrix * color.rgb;\n\
color.rgb = clamp(color.rgb, 0.0, 1.0);\n\
color.rgb = pow(color.rgb, vec3(1.0/2.4));\n\
out_color = color;\n\
}\n" };

#else
char vertex_osd[] = { "\
\n\
in vec2 vertex_position;\n\
in vec2 vertex_texcoord0;\n\
@@ -100,9 +99,9 @@ out vec2 texcoord0;\n\
void main() {\n\
gl_Position = vec4(vertex_position, 1.0, 1.0);\n\
texcoord0 = vertex_texcoord0;\n\
}\n" };

char fragment_osd[] = { "\
\n\
#define texture1D texture\n\
precision mediump float; \
@@ -113,9 +112,9 @@ void main() {\n\
vec4 color; \n\
color = vec4(texture(texture0, texcoord0));\n\
out_color = color;\n\
}\n" };

char vertex[] = { "\
\n\
in vec2 vertex_position;\n\
in vec2 vertex_texcoord0;\n\
@@ -126,9 +125,9 @@ void main() {\n\
gl_Position = vec4(vertex_position, 1.0, 1.0);\n\
texcoord0 = vertex_texcoord0;\n\
texcoord1 = vertex_texcoord1;\n\
}\n" };

char fragment[] = { "\
\n\
#define texture1D texture\n\
#define texture3D texture\n\
@@ -150,9 +149,9 @@ color.rgb = mat3(colormatrix) * color.rgb + colormatrix_c;\n\
color.a = 1.0;\n\
// color mapping\n\
out_color = color;\n\
}\n" };

char fragment_bt2100[] = { "\
\n \
#define texture1D texture\n\
#define texture3D texture\n\
@@ -180,8 +179,9 @@ color.rgb = cms_matrix * color.rgb;\n\
color.rgb = clamp(color.rgb, 0.0, 1.0);\n\
color.rgb = pow(color.rgb, vec3(1.0/2.4));\n\
out_color = color;\n\
}\n" };
#endif

/* Color conversion matrix: RGB = m * YUV + c
 * m is in row-major matrix, with m[row][col], e.g.:
 *     [ a11 a12 a13 ]     float m[3][3] = { { a11, a12, a13 },
@@ -195,50 +195,53 @@ out_color = color;\n\
 * is the Y vector (1, 1, 1), the 2nd is the U vector, the 3rd the V vector.
 * The matrix might also be used for other conversions and colorspaces.
 */
struct mp_cmat {
    GLfloat m[3][3];                    // colormatrix
    GLfloat c[3];                       // colormatrix_c
};

struct mp_mat {
    GLfloat m[3][3];
};

// YUV input limited range (16-235 for luma, 16-240 for chroma)
// ITU-R BT.601 (SD)
struct mp_cmat yuv_bt601 = {
    {{ 1.164384, 1.164384, 1.164384 },
     { 0.00000, -0.391762, 2.017232 },
     { 1.596027, -0.812968, 0.000000 }},
    { -0.874202, 0.531668, -1.085631 }
};

// ITU-R BT.709 (HD)
struct mp_cmat yuv_bt709 = {
    {{ 1.164384, 1.164384, 1.164384 },
     { 0.00000, -0.213249, 2.112402 },
     { 1.792741, -0.532909, 0.000000 }},
    { -0.972945, 0.301483, -1.133402 }
};

// ITU-R BT.2020 non-constant luminance system
struct mp_cmat yuv_bt2020ncl = {
    {{ 1.164384, 1.164384, 1.164384 },
     { 0.00000, -0.187326, 2.141772 },
     { 1.678674, -0.650424, 0.000000 }},
    { -0.915688, 0.347459, -1.148145 }
};

// ITU-R BT.2020 constant luminance system
struct mp_cmat yuv_bt2020cl = {
    {{ 0.0000, 1.164384, 0.000000 },
     { 0.00000, 0.000000, 1.138393 },
     { 1.138393, 0.000000, 0.000000 }},
    { -0.571429, -0.073059, -0.571429 }
};

float cms_matrix[3][3] = {
    { 1.660497, -0.124547, -0.018154 },
    { -0.587657, 1.132895, -0.100597 },
    { -0.072840, -0.008348, 1.118751 }
};

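A small sketch (not part of the plugin) of how these tables are applied, mirroring what the fragment shaders do with mat3(colormatrix) * color.rgb + colormatrix_c: since m[0], m[1], m[2] are the Y, U and V contribution vectors, the scalar form is rgb[j] = sum_i m[i][j] * yuv[i] + c[j]:

    // hypothetical CPU-side helper for one normalized (0..1) limited-range sample
    static void yuv_to_rgb(const struct mp_cmat *cm, const float yuv[3], float rgb[3])
    {
        for (int j = 0; j < 3; ++j) {       // j: output channel R, G, B
            rgb[j] = cm->c[j];              // offset term (colormatrix_c)
            for (int i = 0; i < 3; ++i) {   // i: input component Y, U, V
                rgb[j] += cm->m[i][j] * yuv[i];
            }
        }
    }

For example, feeding yuv_bt709 the sample Y=180/255, U=V=128/255 yields roughly 0.75 in all three channels, i.e. a neutral grey, as expected for a chroma-neutral input.
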
struct gl_vao_entry {
    // used for shader / glBindAttribLocation
    const char *name;
    // glVertexAttribPointer() arguments
@@ -248,18 +251,20 @@ struct gl_vao_entry {
    int offset;
};

struct vertex_pt {
    float x, y;
};

struct vertex_pi {
    GLint x, y;
};

#define TEXUNIT_VIDEO_NUM 6

struct vertex {
    struct vertex_pt position;
    struct vertex_pt texcoord[TEXUNIT_VIDEO_NUM];
};
@@ -271,14 +276,13 @@ static const struct gl_vao_entry vertex_vao[] = {
    {0}
};

static void compile_attach_shader(GLuint program, GLenum type, const char *source)
{
    GLuint shader;
    GLint status, log_length;
    char log[4000];
    GLsizei len;

    shader = glCreateShader(type);
    glShaderSource(shader, 1, &source, NULL);
    glCompileShader(shader);
@@ -286,9 +290,9 @@ static void compile_attach_shader(GLuint program,
    glGetShaderiv(shader, GL_COMPILE_STATUS, &status);
    log_length = 0;
    glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &log_length);
    glGetShaderInfoLog(shader, 4000, &len, log);
    GlxCheck();
    Debug(3, "compile Status %d loglen %d >%s<\n", status, log_length, log);

    glAttachShader(program, shader);
    glDeleteShader(shader);
@@ -296,38 +300,39 @@ Debug(3,"compile Status %d loglen %d >%s<\n",status,log_length,log);

static void link_shader(GLuint program)
{
    GLint status, log_length;

    glLinkProgram(program);
    status = 0;
    glGetProgramiv(program, GL_LINK_STATUS, &status);
    log_length = 0;
    glGetProgramiv(program, GL_INFO_LOG_LENGTH, &log_length);
    Debug(3, "Link Status %d loglen %d\n", status, log_length);
}

static GLuint sc_generate_osd(GLuint gl_prog)
{
    Debug(3, "vor create osd\n");
    gl_prog = glCreateProgram();
    Debug(3, "vor compile vertex osd\n");
    compile_attach_shader(gl_prog, GL_VERTEX_SHADER, vertex_osd);
    Debug(3, "vor compile fragment osd \n");
    compile_attach_shader(gl_prog, GL_FRAGMENT_SHADER, fragment_osd);
    glBindAttribLocation(gl_prog, 0, "vertex_position");
    glBindAttribLocation(gl_prog, 1, "vertex_texcoord0");

    link_shader(gl_prog);
    return gl_prog;
}

static GLuint sc_generate(GLuint gl_prog, enum AVColorSpace colorspace)
{
    char vname[80];
    int n;
    GLint cmsLoc;
    float *m, *c, *cms;
    char *frag;

    switch (colorspace) {
@@ -335,62 +340,62 @@ static GLuint sc_generate(GLuint gl_prog, enum AVColorSpace colorspace) {
            m = &yuv_bt601.m[0][0];
            c = &yuv_bt601.c[0];
            frag = fragment;
            Debug(3, "BT601 Colorspace used\n");
            break;
        case AVCOL_SPC_BT709:
        case AVCOL_SPC_UNSPECIFIED:     // comes with UHD
            m = &yuv_bt709.m[0][0];
            c = &yuv_bt709.c[0];
            frag = fragment;
            Debug(3, "BT709 Colorspace used\n");
            break;
        case AVCOL_SPC_BT2020_NCL:
            m = &yuv_bt2020ncl.m[0][0];
            c = &yuv_bt2020ncl.c[0];
            cms = &cms_matrix[0][0];
            frag = fragment_bt2100;
            Debug(3, "BT2020NCL Colorspace used\n");
            break;
        default:                        // fallback
            m = &yuv_bt709.m[0][0];
            c = &yuv_bt709.c[0];
            frag = fragment;
            Debug(3, "default BT709 Colorspace used %d\n", colorspace);
            break;
    }

    Debug(3, "vor create\n");
    gl_prog = glCreateProgram();
    Debug(3, "vor compile vertex\n");
    compile_attach_shader(gl_prog, GL_VERTEX_SHADER, vertex);
    Debug(3, "vor compile fragment\n");
    compile_attach_shader(gl_prog, GL_FRAGMENT_SHADER, frag);
    glBindAttribLocation(gl_prog, 0, "vertex_position");

    for (n = 0; n < 6; n++) {
        sprintf(vname, "vertex_texcoord%1d", n);
        glBindAttribLocation(gl_prog, n + 1, vname);
    }

    link_shader(gl_prog);

    gl_colormatrix = glGetUniformLocation(gl_prog, "colormatrix");
    Debug(3, "get uniform colormatrix %d \n", gl_colormatrix);
    if (gl_colormatrix != -1)
        glProgramUniformMatrix3fv(gl_prog, gl_colormatrix, 1, 0, m);
    GlxCheck();
    Debug(3, "nach set colormatrix\n");

    gl_colormatrix_c = glGetUniformLocation(gl_prog, "colormatrix_c");
    Debug(3, "get uniform colormatrix_c %d %f\n", gl_colormatrix_c, *c);
    if (gl_colormatrix_c != -1)
        glProgramUniform3fv(gl_prog, gl_colormatrix_c, 1, c);
    GlxCheck();

    if (colorspace == AVCOL_SPC_BT2020_NCL) {
        cmsLoc = glGetUniformLocation(gl_prog, "cms_matrix");
        if (cmsLoc != -1)
            glProgramUniformMatrix3fv(gl_prog, cmsLoc, 1, 0, cms);
        GlxCheck();
    }

@@ -402,45 +407,44 @@ static void render_pass_quad(int flip, float xcrop, float ycrop)
    struct vertex va[4];
    int n;
    const struct gl_vao_entry *e;

    // uhhhh what a hack
    if (!flip) {
        va[0].position.x = (float)-1.0;
        va[0].position.y = (float)1.0;
        va[1].position.x = (float)-1.0;
        va[1].position.y = (float)-1.0;
        va[2].position.x = (float)1.0;
        va[2].position.y = (float)1.0;
        va[3].position.x = (float)1.0;
        va[3].position.y = (float)-1.0;
    } else {
        va[0].position.x = (float)-1.0;
        va[0].position.y = (float)-1.0;
        va[1].position.x = (float)-1.0;
        va[1].position.y = (float)1.0;
        va[2].position.x = (float)1.0;
        va[2].position.y = (float)-1.0;
        va[3].position.x = (float)1.0;
        va[3].position.y = (float)1.0;
    }

    va[0].texcoord[0].x = (float)0.0 + xcrop;
    va[0].texcoord[0].y = (float)0.0 + ycrop;   // cropped at the top left
    va[0].texcoord[1].x = (float)0.0 + xcrop;
    va[0].texcoord[1].y = (float)0.0 + ycrop;   // cropped at the top left
    va[1].texcoord[0].x = (float)0.0 + xcrop;
    va[1].texcoord[0].y = (float)1.0 - ycrop;   // cropped at the bottom left, 1.0 - value
    va[1].texcoord[1].x = (float)0.0 + xcrop;
    va[1].texcoord[1].y = (float)1.0 - ycrop;   // cropped at the bottom left, 1.0 - value
    va[2].texcoord[0].x = (float)1.0 - xcrop;
    va[2].texcoord[0].y = (float)0.0 + ycrop;   // cropped at the top right
    va[2].texcoord[1].x = (float)1.0 - xcrop;
    va[2].texcoord[1].y = (float)0.0 + ycrop;   // cropped at the top right
    va[3].texcoord[0].x = (float)1.0 - xcrop;
    va[3].texcoord[0].y = (float)1.0 - ycrop;   // cropped at the bottom right, 1.0 - value
    va[3].texcoord[1].x = (float)1.0 - xcrop;
    va[3].texcoord[1].y = (float)1.0 - ycrop;   // cropped at the bottom right, 1.0 - value

    glBindBuffer(GL_ARRAY_BUFFER, vao_buffer);
    glBufferData(GL_ARRAY_BUFFER, 4 * sizeof(struct vertex), va, GL_DYNAMIC_DRAW);
@@ -448,18 +452,16 @@ static void render_pass_quad(int flip, float xcrop, float ycrop)

    // enable attribs
    glBindBuffer(GL_ARRAY_BUFFER, vao_buffer);
    for (n = 0; vertex_vao[n].name; n++) {
        e = &vertex_vao[n];
        glEnableVertexAttribArray(n);
        glVertexAttribPointer(n, e->num_elems, e->type, e->normalized, sizeof(struct vertex),
            (void *)(intptr_t)e->offset);
    }
    glBindBuffer(GL_ARRAY_BUFFER, 0);

    // draw quad
    glDrawArrays(GL_TRIANGLE_STRIP, 0, 4);
    for (n = 0; vertex_vao[n].name; n++)
        glDisableVertexAttribArray(n);
}

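The crop parameters are fractions of the source texture trimmed symmetrically from each edge: the texture coordinates run from 0.0 + xcrop to 1.0 - xcrop horizontally and from 0.0 + ycrop to 1.0 - ycrop vertically. As an illustrative example (values not taken from the plugin), a call such as render_pass_quad(0, 0.125f, 0.0f) would discard 12.5 % of the picture on the left and on the right and keep the middle 75 % of each line, which is the usual way to cut a 4:3 centre region out of a 16:9 frame.
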
614 softhdcuvid.cpp (file diff suppressed because it is too large)

243 softhddev.c
@@ -232,10 +232,8 @@ static int MpegCheck(const uint8_t * data, int size)
         break;
     }
     if (0) {
-        Debug(3,
-            "pesdemux: mpeg%s layer%d bitrate=%d samplerate=%d %d bytes\n",
-            mpeg25 ? "2.5" : mpeg2 ? "2" : "1", layer, bit_rate, sample_rate,
-            frame_size);
+        Debug(3, "pesdemux: mpeg%s layer%d bitrate=%d samplerate=%d %d bytes\n", mpeg25 ? "2.5" : mpeg2 ? "2" : "1",
+            layer, bit_rate, sample_rate, frame_size);
     }
 
     if (frame_size + 4 > size) {
@@ -580,8 +578,7 @@ static void PesInit(PesDemux * pesdx)
 /// @param size number of payload data bytes
 /// @param is_start flag, start of pes packet
 ///
-static void PesParse(PesDemux * pesdx, const uint8_t * data, int size,
-    int is_start)
+static void PesParse(PesDemux * pesdx, const uint8_t * data, int size, int is_start)
 {
     const uint8_t *p;
     const uint8_t *q;
@@ -686,8 +683,7 @@ static void PesParse(PesDemux * pesdx, const uint8_t * data, int size,
 
     // new codec id, close and open new
     if (AudioCodecID != codec_id) {
-        Debug(3, "pesdemux: new codec %#06x -> %#06x\n",
-            AudioCodecID, codec_id);
+        Debug(3, "pesdemux: new codec %#06x -> %#06x\n", AudioCodecID, codec_id);
         CodecAudioClose(MyAudioDecoder);
         CodecAudioOpen(MyAudioDecoder, codec_id);
         AudioCodecID = codec_id;
@@ -709,7 +705,7 @@ static void PesParse(PesDemux * pesdx, const uint8_t * data, int size,
     if (AudioCodecID != AV_CODEC_ID_NONE) {
         // shouldn't happen after we have a vaild codec
         // detected
-        Debug(4, "pesdemux: skip @%d %02x\n", pesdx->Skip,q[0]);
+        Debug(4, "pesdemux: skip @%d %02x\n", pesdx->Skip, q[0]);
     }
     // try next byte
     ++pesdx->Skip;
@@ -782,23 +778,22 @@ static void PesParse(PesDemux * pesdx, const uint8_t * data, int size,
 
     if ((pesdx->Header[7] & 0xC0) == 0x80) {
         pts =
-            (int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 |
-            (data[11] & 0xFE) << 14 | data[12] << 7 | (data[13]
+            (int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] & 0xFE) << 14 | data[12] << 7
+            | (data[13]
             & 0xFE) >> 1;
         pesdx->PTS = pts;
         pesdx->DTS = AV_NOPTS_VALUE;
     } else if ((pesdx->Header[7] & 0xC0) == 0xC0) {
         pts =
-            (int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 |
-            (data[11] & 0xFE) << 14 | data[12] << 7 | (data[13]
+            (int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] & 0xFE) << 14 | data[12] << 7
+            | (data[13]
            & 0xFE) >> 1;
         pesdx->PTS = pts;
         dts =
-            (int64_t) (data[14] & 0x0E) << 29 | data[15] << 22
-            | (data[16] & 0xFE) << 14 | data[17] << 7 |
-            (data[18] & 0xFE) >> 1;
+            (int64_t) (data[14] & 0x0E) << 29 | data[15] << 22 | (data[16] & 0xFE) << 14 | data[17] <<
+            7 | (data[18] & 0xFE) >> 1;
         pesdx->DTS = dts;
-        Debug(4,"pesdemux: pts %#012" PRIx64 " %#012" PRIx64 "\n", pts, dts);
+        Debug(4, "pesdemux: pts %#012" PRIx64 " %#012" PRIx64 "\n", pts, dts);
     }
   empty_header:
     pesdx->State = PES_INIT;
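The reflowed expressions in this hunk assemble the 33-bit PES presentation and decoding timestamps from five header bytes. A minimal standalone sketch of the same bit layout follows; the helper name PesTimestamp is hypothetical and not part of softhddev.c.

#include <stdint.h>

// Assemble a 33-bit PES timestamp from its 5-byte encoding.
// 'p' points at the first timestamp byte (e.g. &data[9] for the PTS,
// &data[14] for the DTS when both are present).
static int64_t PesTimestamp(const uint8_t *p)
{
    return (int64_t) (p[0] & 0x0E) << 29    // bits 32..30
        | p[1] << 22                        // bits 29..22
        | (p[2] & 0xFE) << 14               // bits 21..15
        | p[3] << 7                         // bits 14..7
        | (p[4] & 0xFE) >> 1;               // bits 6..0
}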
@@ -831,31 +826,24 @@ static void PesParse(PesDemux * pesdx, const uint8_t * data, int size,
             if (AudioCodecID != AV_CODEC_ID_PCM_DVD) {
 
                 q = pesdx->Header;
-                Debug(3, "pesdemux: LPCM %d sr:%d bits:%d chan:%d\n",
-                    q[0], q[5] >> 4, (((q[5] >> 6) & 0x3) + 4) * 4,
-                    (q[5] & 0x7) + 1);
+                Debug(3, "pesdemux: LPCM %d sr:%d bits:%d chan:%d\n", q[0], q[5] >> 4,
+                    (((q[5] >> 6) & 0x3) + 4) * 4, (q[5] & 0x7) + 1);
                 CodecAudioClose(MyAudioDecoder);
 
                 bits_per_sample = (((q[5] >> 6) & 0x3) + 4) * 4;
                 if (bits_per_sample != 16) {
-                    Error(_
-                        ("softhddev: LPCM %d bits per sample aren't supported\n"),
-                        bits_per_sample);
+                    Error(_("softhddev: LPCM %d bits per sample aren't supported\n"), bits_per_sample);
                     // FIXME: handle unsupported formats.
                 }
                 samplerate = samplerates[q[5] >> 4];
                 channels = (q[5] & 0x7) + 1;
                 AudioSetup(&samplerate, &channels, 0);
                 if (samplerate != samplerates[q[5] >> 4]) {
-                    Error(_
-                        ("softhddev: LPCM %d sample-rate is unsupported\n"),
-                        samplerates[q[5] >> 4]);
+                    Error(_("softhddev: LPCM %d sample-rate is unsupported\n"), samplerates[q[5] >> 4]);
                     // FIXME: support resample
                 }
                 if (channels != (q[5] & 0x7) + 1) {
-                    Error(_
-                        ("softhddev: LPCM %d channels are unsupported\n"),
-                        (q[5] & 0x7) + 1);
+                    Error(_("softhddev: LPCM %d channels are unsupported\n"), (q[5] & 0x7) + 1);
                     // FIXME: support resample
                 }
                 //CodecAudioOpen(MyAudioDecoder, AV_CODEC_ID_PCM_DVD);
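The LPCM branch above derives everything from a single header byte. A compact sketch of that decode, with the samplerates[] lookup table passed in rather than reproduced (its contents are defined elsewhere in softhddev.c) and a hypothetical helper name:

#include <stdint.h>

// Decode the PCM_DVD (LPCM) parameters from header byte q[5],
// mirroring the expressions used by the demuxer above.
static void LpcmParams(const uint8_t *q, const int *samplerates,
    int *bits, int *rate, int *channels)
{
    *bits = (((q[5] >> 6) & 0x3) + 4) * 4;  // 2-bit quantization code: 16/20/24/28 bits
    *rate = samplerates[q[5] >> 4];         // lookup table indexed by the top nibble
    *channels = (q[5] & 0x7) + 1;           // low 3 bits hold channels - 1
}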
@@ -949,8 +937,7 @@ static int TsDemuxer(TsDemux * tsdx, const uint8_t * data, int size)
     }
 #ifdef DEBUG
     pid = (p[1] & 0x1F) << 8 | p[2];
-    Debug(4, "tsdemux: PID: %#04x%s%s\n", pid, p[1] & 0x40 ? " start" : "",
-        p[3] & 0x10 ? " payload" : "");
+    Debug(4, "tsdemux: PID: %#04x%s%s\n", pid, p[1] & 0x40 ? " start" : "", p[3] & 0x10 ? " payload" : "");
 #endif
     // skip adaptation field
     switch (p[3] & 0x30) {              // adaption field
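For reference, the TS header fields touched in this hunk sit in the first four bytes of every 188-byte transport packet. A standalone sketch; TsHeader is a hypothetical helper, not part of softhddev.c.

#include <stdint.h>
#include <stdbool.h>

// Pull the header fields used above out of a 188-byte TS packet.
// 'p' must point at the sync byte (0x47).
static bool TsHeader(const uint8_t *p, int *pid, bool *unit_start, bool *has_payload)
{
    if (p[0] != 0x47) {                 // not synchronized
        return false;
    }
    *pid = (p[1] & 0x1F) << 8 | p[2];   // 13-bit packet identifier
    *unit_start = p[1] & 0x40;          // payload_unit_start_indicator
    *has_payload = p[3] & 0x10;         // payload present (low bit of adaptation_field_control)
    return true;
}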
@@ -1011,12 +998,12 @@ int PlayAudio(const uint8_t * data, int size, uint8_t id)
     if (SkipAudio || !MyAudioDecoder) { // skip audio
         return size;
     }
-    if (StreamFreezed ) {               // stream freezed
+    if (StreamFreezed) {                // stream freezed
         return 0;
     }
     if (AudioDelay) {
-        Debug(3,"AudioDelay %dms\n",AudioDelay);
-        usleep(AudioDelay/90);
+        Debug(3, "AudioDelay %dms\n", AudioDelay);
+        usleep(AudioDelay / 90);
         AudioDelay = 0;
         return 0;
     }
@@ -1035,8 +1022,7 @@ int PlayAudio(const uint8_t * data, int size, uint8_t id)
     }
 #ifdef USE_SOFTLIMIT
     // soft limit buffer full
-    if (AudioSyncStream && VideoGetBuffers(AudioSyncStream) > 3
-        && AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE * 2) {
+    if (AudioSyncStream && VideoGetBuffers(AudioSyncStream) > 3 && AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE * 2) {
         return 0;
     }
 #endif
@@ -1061,14 +1047,13 @@ int PlayAudio(const uint8_t * data, int size, uint8_t id)
 
     if (data[7] & 0x80 && n >= 5) {
         AudioAvPkt->pts =
-            (int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] &
-            0xFE) << 14 | data[12] << 7 | (data[13] & 0xFE) >> 1;
+            (int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] & 0xFE) << 14 | data[12] << 7 | (data[13] &
+            0xFE) >> 1;
         //Debug(3, "audio: pts %#012" PRIx64 "\n", AudioAvPkt->pts);
     }
     if (0) {                            // dts is unused
         if (data[7] & 0x40) {
-            AudioAvPkt->dts =
-                (int64_t) (data[14] & 0x0E) << 29 | data[15] << 22 | (data[16]
+            AudioAvPkt->dts = (int64_t) (data[14] & 0x0E) << 29 | data[15] << 22 | (data[16]
                 & 0xFE) << 14 | data[17] << 7 | (data[18] & 0xFE) >> 1;
             Debug(3, "audio: dts %#012" PRIx64 "\n", AudioAvPkt->dts);
         }
@@ -1098,16 +1083,13 @@ int PlayAudio(const uint8_t * data, int size, uint8_t id)
             int channels;
             int bits_per_sample;
 
-            Debug(3, "[softhddev]%s: LPCM %d sr:%d bits:%d chan:%d\n",
-                __FUNCTION__, id, p[5] >> 4, (((p[5] >> 6) & 0x3) + 4) * 4,
-                (p[5] & 0x7) + 1);
+            Debug(3, "[softhddev]%s: LPCM %d sr:%d bits:%d chan:%d\n", __FUNCTION__, id, p[5] >> 4,
+                (((p[5] >> 6) & 0x3) + 4) * 4, (p[5] & 0x7) + 1);
             CodecAudioClose(MyAudioDecoder);
 
             bits_per_sample = (((p[5] >> 6) & 0x3) + 4) * 4;
             if (bits_per_sample != 16) {
-                Error(_
-                    ("[softhddev] LPCM %d bits per sample aren't supported\n"),
-                    bits_per_sample);
+                Error(_("[softhddev] LPCM %d bits per sample aren't supported\n"), bits_per_sample);
                 // FIXME: handle unsupported formats.
             }
             samplerate = samplerates[p[5] >> 4];
@@ -1117,13 +1099,11 @@ int PlayAudio(const uint8_t * data, int size, uint8_t id)
             AudioSetBufferTime(400);
             AudioSetup(&samplerate, &channels, 0);
             if (samplerate != samplerates[p[5] >> 4]) {
-                Error(_("[softhddev] LPCM %d sample-rate is unsupported\n"),
-                    samplerates[p[5] >> 4]);
+                Error(_("[softhddev] LPCM %d sample-rate is unsupported\n"), samplerates[p[5] >> 4]);
                 // FIXME: support resample
             }
             if (channels != (p[5] & 0x7) + 1) {
-                Error(_("[softhddev] LPCM %d channels are unsupported\n"),
-                    (p[5] & 0x7) + 1);
+                Error(_("[softhddev] LPCM %d channels are unsupported\n"), (p[5] & 0x7) + 1);
                 // FIXME: support resample
             }
             //CodecAudioOpen(MyAudioDecoder, AV_CODEC_ID_PCM_DVD);
@@ -1268,14 +1248,13 @@ int PlayTsAudio(const uint8_t * data, int size)
     }
 #ifdef USE_SOFTLIMIT
     // soft limit buffer full
-    if (AudioSyncStream && VideoGetBuffers(AudioSyncStream) > 3
-        && AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE * 2) {
+    if (AudioSyncStream && VideoGetBuffers(AudioSyncStream) > 3 && AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE * 2) {
         return 0;
     }
 #endif
     if (AudioDelay) {
-        Debug(3,"AudioDelay %dms\n",AudioDelay);
-        usleep(AudioDelay*1000);
+        Debug(3, "AudioDelay %dms\n", AudioDelay);
+        usleep(AudioDelay * 1000);
         AudioDelay = 0;
         // TsDemuxer(tsdx, data, size); // insert dummy audio
 
@@ -1411,8 +1390,7 @@ static void VideoPacketExit(VideoStream * stream)
 ** @param data data of pes packet
 ** @param size size of pes packet
 */
-static void VideoEnqueue(VideoStream * stream, int64_t pts, int64_t dts, const void *data,
-    int size)
+static void VideoEnqueue(VideoStream * stream, int64_t pts, int64_t dts, const void *data, int size)
 {
     AVPacket *avpkt;
 
@@ -1528,8 +1506,7 @@ static void VideoNextPacket(VideoStream * stream, int codec_id)
 ** @param data data of pes packet
 ** @param size size of pes packet
 */
-static void VideoMpegEnqueue(VideoStream * stream, int64_t pts, int64_t dts,
-    const uint8_t * data, int size)
+static void VideoMpegEnqueue(VideoStream * stream, int64_t pts, int64_t dts, const uint8_t * data, int size)
 {
     static const char startcode[3] = { 0x00, 0x00, 0x01 };
     const uint8_t *p;
@@ -1556,7 +1533,7 @@ static void VideoMpegEnqueue(VideoStream * stream, int64_t pts, int64_t dts,
 #endif
             if (!p[0] || p[0] == 0xb3) {
 #ifdef DEBUG
-                printf("last: %d start aspect %02x\n", stream->StartCodeState,p[4]);
+                printf("last: %d start aspect %02x\n", stream->StartCodeState, p[4]);
 #endif
                 stream->PacketRb[stream->PacketWrite].stream_index -= 3;
                 VideoNextPacket(stream, AV_CODEC_ID_MPEG2VIDEO);
@@ -1574,7 +1551,7 @@ static void VideoMpegEnqueue(VideoStream * stream, int64_t pts, int64_t dts,
 #endif
             if (p[0] == 0x01 && (!p[1] || p[1] == 0xb3)) {
 #ifdef DEBUG
-                printf( "last: %d start aspect %02x\n", stream->StartCodeState,p[5]);
+                printf("last: %d start aspect %02x\n", stream->StartCodeState, p[5]);
 #endif
                 stream->PacketRb[stream->PacketWrite].stream_index -= 2;
                 VideoNextPacket(stream, AV_CODEC_ID_MPEG2VIDEO);
@@ -1591,7 +1568,7 @@ static void VideoMpegEnqueue(VideoStream * stream, int64_t pts, int64_t dts,
 #endif
             if (!p[0] && p[1] == 0x01 && (!p[2] || p[2] == 0xb3)) {
 #ifdef DEBUG
-                printf( "last: %d start aspect %02x\n", stream->StartCodeState,p[6]);
+                printf("last: %d start aspect %02x\n", stream->StartCodeState, p[6]);
 #endif
                 stream->PacketRb[stream->PacketWrite].stream_index -= 1;
                 VideoNextPacket(stream, AV_CODEC_ID_MPEG2VIDEO);
@@ -1725,8 +1702,8 @@ static void FixPacketForFFMpeg(VideoDecoder * vdecoder, AVPacket * avpkt)
             tmp->size = p - tmp->data;
 #if STILL_DEBUG>1
             if (InStillPicture) {
-                fprintf(stderr, "\nfix:%9d,%02x %02x %02x %02x\n", tmp->size,
-                    tmp->data[0], tmp->data[1], tmp->data[2], tmp->data[3]);
+                fprintf(stderr, "\nfix:%9d,%02x %02x %02x %02x\n", tmp->size, tmp->data[0], tmp->data[1], tmp->data[2],
+                    tmp->data[3]);
             }
 #endif
             CodecVideoDecode(vdecoder, tmp);
@@ -1742,15 +1719,14 @@ static void FixPacketForFFMpeg(VideoDecoder * vdecoder, AVPacket * avpkt)
 
 #if STILL_DEBUG>1
     if (InStillPicture) {
-        fprintf(stderr, "\nfix:%9d.%02x %02x %02x %02x\n", tmp->size,
-            tmp->data[0], tmp->data[1], tmp->data[2], tmp->data[3]);
+        fprintf(stderr, "\nfix:%9d.%02x %02x %02x %02x\n", tmp->size, tmp->data[0], tmp->data[1], tmp->data[2],
+            tmp->data[3]);
     }
 #endif
     CodecVideoDecode(vdecoder, tmp);
 }
 #endif
 
 
 /**
 **  Open video stream.
 **
@@ -1782,7 +1758,8 @@ static void VideoStreamClose(VideoStream * stream, int delhw)
     stream->SkipStream = 1;
     if (stream->Decoder) {
         VideoDecoder *decoder;
-        Debug(3,"VideoStreamClose");
+
+        Debug(3, "VideoStreamClose");
         decoder = stream->Decoder;
         // FIXME: remove this lock for main stream close
         pthread_mutex_lock(&stream->DecoderLockMutex);
@@ -1832,7 +1809,7 @@ int VideoPollInput(VideoStream * stream)
         atomic_set(&stream->PacketsFilled, 0);
         stream->PacketRead = stream->PacketWrite;
         // FIXME: ->Decoder already checked
-        Debug(3,"Clear buffer request in Poll\n");
+        Debug(3, "Clear buffer request in Poll\n");
         if (stream->Decoder) {
             CodecVideoFlushBuffers(stream->Decoder);
             VideoResetStart(stream->HwDecoder);
@@ -1879,7 +1856,7 @@ int VideoDecodeInput(VideoStream * stream)
         // FIXME: ->Decoder already checked
         if (stream->Decoder) {
             CodecVideoFlushBuffers(stream->Decoder);
-            Debug(3,"Clear buffer request in Decode\n");
+            Debug(3, "Clear buffer request in Decode\n");
             VideoResetStart(stream->HwDecoder);
         }
         stream->ClearBuffers = 0;
@@ -1897,7 +1874,7 @@ int VideoDecodeInput(VideoStream * stream)
     }
 #if 0
     // clearing for normal channel switch has no advantage
-    if (stream->ClearClose || stream->ClosingStream ) {
+    if (stream->ClearClose || stream->ClosingStream) {
         int f;
 
         // FIXME: during replay all packets are always checked
@@ -1908,8 +1885,7 @@ int VideoDecodeInput(VideoStream * stream)
         if (f) {
             Debug(3, "video: cleared upto close\n");
             atomic_sub(f, &stream->PacketsFilled);
-            stream->PacketRead =
-                (stream->PacketRead + f) % VIDEO_PACKET_MAX;
+            stream->PacketRead = (stream->PacketRead + f) % VIDEO_PACKET_MAX;
             stream->ClearClose = 0;
         }
         break;
@@ -1927,7 +1903,7 @@ int VideoDecodeInput(VideoStream * stream)
         case AV_CODEC_ID_NONE:
             stream->ClosingStream = 0;
             if (stream->LastCodecID != AV_CODEC_ID_NONE) {
-                Debug(3,"in VideoDecode make close\n");
+                Debug(3, "in VideoDecode make close\n");
                 stream->LastCodecID = AV_CODEC_ID_NONE;
                 CodecVideoClose(stream->Decoder);
                 // FIXME: CodecVideoClose calls/uses hw decoder
@@ -1944,7 +1920,7 @@ int VideoDecodeInput(VideoStream * stream)
             break;
         case AV_CODEC_ID_H264:
            if (stream->LastCodecID != AV_CODEC_ID_H264) {
-                Debug(3,"CodecVideoOpen h264\n");
+                Debug(3, "CodecVideoOpen h264\n");
                 stream->LastCodecID = AV_CODEC_ID_H264;
                 CodecVideoOpen(stream->Decoder, AV_CODEC_ID_H264);
             }
@@ -2049,7 +2025,7 @@ static void StopVideo(void)
         MyVideoStream->Decoder = NULL;  // lock read thread
         pthread_mutex_unlock(&MyVideoStream->DecoderLockMutex);
         // FIXME: this can crash, hw decoder released by video exit
-        Debug(3,"in Stop Video");
+        Debug(3, "in Stop Video");
         CodecVideoClose(decoder);
         CodecVideoDelDecoder(decoder);
     }
@@ -2128,8 +2104,7 @@ static int ValidateMpeg(const uint8_t * data, int size)
         return -1;
     }
     if (data[0] || data[1] || data[2] != 0x01) {
-        printf("%02x: %02x %02x %02x %02x %02x\n", data[-1], data[0],
-            data[1], data[2], data[3], data[4]);
+        printf("%02x: %02x %02x %02x %02x %02x\n", data[-1], data[0], data[1], data[2], data[3], data[4]);
         return -1;
     }
 
@@ -2163,10 +2138,11 @@
 int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
 {
     const uint8_t *check;
-    int64_t pts,dts;
+    int64_t pts, dts;
     int n;
     int z;
     int l;
 
     if (!stream->Decoder) {             // no x11 video started
         return size;
     }
@@ -2197,8 +2173,7 @@ int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
     }
     if (stream->InvalidPesCounter) {
         if (stream->InvalidPesCounter > 1) {
-            Error(_("[softhddev] %d invalid PES video packet(s)\n"),
-                stream->InvalidPesCounter);
+            Error(_("[softhddev] %d invalid PES video packet(s)\n"), stream->InvalidPesCounter);
         }
         stream->InvalidPesCounter = 0;
     }
@@ -2223,7 +2198,8 @@ int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
     }
 #ifdef USE_SOFTLIMIT
     // soft limit buffer full
-    if (AudioSyncStream == stream && atomic_read(&stream->PacketsFilled) > 3 && AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE * 2) {
+    if (AudioSyncStream == stream && atomic_read(&stream->PacketsFilled) > 3
+        && AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE * 2) {
         return 0;
     }
 #endif
@@ -2231,14 +2207,17 @@ int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
     pts = AV_NOPTS_VALUE;
     dts = AV_NOPTS_VALUE;
     if ((data[7] & 0xc0) == 0x80) {
-        pts = (int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] &
-            0xFE) << 14 | data[12] << 7 | (data[13] & 0xFE) >> 1;
+        pts =
+            (int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] & 0xFE) << 14 | data[12] << 7 | (data[13] &
+            0xFE) >> 1;
     }
     if ((data[7] & 0xC0) == 0xc0) {
-        pts = (int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] &
-            0xFE) << 14 | data[12] << 7 | (data[13] & 0xFE) >> 1;
-        dts = (int64_t) (data[14] & 0x0E) << 29 | data[15] << 22 | (data[16] &
-            0xFE) << 14 | data[17] << 7 | (data[18] & 0xFE) >> 1;
+        pts =
+            (int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] & 0xFE) << 14 | data[12] << 7 | (data[13] &
+            0xFE) >> 1;
+        dts =
+            (int64_t) (data[14] & 0x0E) << 29 | data[15] << 22 | (data[16] & 0xFE) << 14 | data[17] << 7 | (data[18] &
+            0xFE) >> 1;
     }
 
     check = data + 9 + n;
@@ -2266,11 +2245,8 @@ int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
         int fd;
         static int FrameCounter;
 
-        snprintf(buf, sizeof(buf), "frame_%06d_%08d.raw", getpid(),
-            FrameCounter++);
-        if ((fd =
-                open(buf, O_WRONLY | O_CLOEXEC | O_CREAT | O_TRUNC,
-                    0666)) >= 0) {
+        snprintf(buf, sizeof(buf), "frame_%06d_%08d.raw", getpid(), FrameCounter++);
+        if ((fd = open(buf, O_WRONLY | O_CLOEXEC | O_CREAT | O_TRUNC, 0666)) >= 0) {
             if (write(fd, data + 9 + n, size - 9 - n)) {
                 // this construct is to remove the annoying warning
             }
@@ -2413,8 +2389,7 @@ extern uint8_t *CreateJpeg(uint8_t *, int *, int, int, int);
 **
 ** @returns allocated jpeg image.
 */
-uint8_t *CreateJpeg(uint8_t * image, int raw_size, int *size, int quality,
-    int width, int height)
+uint8_t *CreateJpeg(uint8_t * image, int raw_size, int *size, int quality, int width, int height)
 {
     struct jpeg_compress_struct cinfo;
     struct jpeg_error_mgr jerr;
@@ -2492,7 +2467,7 @@ uint8_t *GrabImage(int *size, int jpeg, int quality, int width, int height)
 */
 int SetPlayMode(int play_mode)
 {
-    Debug(3,"Set Playmode %d\n",play_mode);
+    Debug(3, "Set Playmode %d\n", play_mode);
     switch (play_mode) {
         case 0:                         // audio/video from decoder
             // tell video parser we get new stream
@@ -2569,8 +2544,7 @@ void GetVideoSize(int *width, int *height, double *aspect)
     int aspect_den;
 
     if (MyVideoStream->HwDecoder) {
-        VideoGetVideoSize(MyVideoStream->HwDecoder, width, height, &aspect_num,
-            &aspect_den);
+        VideoGetVideoSize(MyVideoStream->HwDecoder, width, height, &aspect_num, &aspect_den);
         *aspect = (double)aspect_num / (double)aspect_den;
     } else {
         *width = 0;
@@ -2580,8 +2554,7 @@ void GetVideoSize(int *width, int *height, double *aspect)
 
 #ifdef DEBUG
     if (done_width != *width || done_height != *height) {
-        Debug(3, "[softhddev]%s: %dx%d %g\n", __FUNCTION__, *width, *height,
-            *aspect);
+        Debug(3, "[softhddev]%s: %dx%d %g\n", __FUNCTION__, *width, *height, *aspect);
         done_width = *width;
         done_height = *height;
     }
@@ -2629,8 +2602,7 @@ void Clear(void)
     for (i = 0; MyVideoStream->ClearBuffers && i < 20; ++i) {
         usleep(1 * 100);
     }
-    Debug(3, "[softhddev]%s: %dms buffers %d\n", __FUNCTION__, i,
-        VideoGetBuffers(MyVideoStream));
+    Debug(3, "[softhddev]%s: %dms buffers %d\n", __FUNCTION__, i, VideoGetBuffers(MyVideoStream));
 }
 
 /**
@@ -2695,7 +2667,6 @@ void StillPicture(const uint8_t * data, int size)
 
     VideoNextPacket(MyVideoStream, AV_CODEC_ID_NONE);   // close last stream
-
 
     if (MyVideoStream->CodecID == AV_CODEC_ID_NONE) {
         // FIXME: should detect codec, see PlayVideo
         Error(_("[softhddev] no codec known for still picture\n"));
@@ -2748,12 +2719,12 @@ void StillPicture(const uint8_t * data, int size)
                 VideoNextPacket(MyVideoStream, AV_CODEC_ID_NONE);   // close last stream
                 MyVideoStream->CodecID = AV_CODEC_ID_MPEG2VIDEO;
             }
-            VideoEnqueue(MyVideoStream, AV_NOPTS_VALUE,AV_NOPTS_VALUE, data, size);
+            VideoEnqueue(MyVideoStream, AV_NOPTS_VALUE, AV_NOPTS_VALUE, data, size);
         }
         if (MyVideoStream->CodecID == AV_CODEC_ID_H264) {
-            VideoEnqueue(MyVideoStream, AV_NOPTS_VALUE, AV_NOPTS_VALUE,seq_end_h264,sizeof(seq_end_h264));
+            VideoEnqueue(MyVideoStream, AV_NOPTS_VALUE, AV_NOPTS_VALUE, seq_end_h264, sizeof(seq_end_h264));
        } else if (MyVideoStream->CodecID == AV_CODEC_ID_HEVC) {
-            VideoEnqueue(MyVideoStream, AV_NOPTS_VALUE, AV_NOPTS_VALUE,seq_end_h265,sizeof(seq_end_h265));
+            VideoEnqueue(MyVideoStream, AV_NOPTS_VALUE, AV_NOPTS_VALUE, seq_end_h265, sizeof(seq_end_h265));
         } else {
             VideoEnqueue(MyVideoStream, AV_NOPTS_VALUE, AV_NOPTS_VALUE, seq_end_mpeg, sizeof(seq_end_mpeg));
         }
@@ -2802,8 +2773,7 @@ int Poll(int timeout)
     filled = atomic_read(&MyVideoStream->PacketsFilled);
     // soft limit + hard limit
     full = (used > AUDIO_MIN_BUFFER_FREE && filled > 3)
-        || AudioFreeBytes() < AUDIO_MIN_BUFFER_FREE
-        || filled >= VIDEO_PACKET_MAX - 10;
+        || AudioFreeBytes() < AUDIO_MIN_BUFFER_FREE || filled >= VIDEO_PACKET_MAX - 10;
 
     if (!full || !timeout) {
         return !full;
@@ -2857,8 +2827,7 @@ void GetOsdSize(int *width, int *height, double *aspect)
 
 #ifdef DEBUG
     if (done_width != *width || done_height != *height) {
-        Debug(3, "[softhddev]%s: %dx%d %g\n", __FUNCTION__, *width, *height,
-            *aspect);
+        Debug(3, "[softhddev]%s: %dx%d %g\n", __FUNCTION__, *width, *height, *aspect);
         done_width = *width;
         done_height = *height;
     }
@@ -2885,8 +2854,7 @@ void OsdClose(void)
 ** @param x x-coordinate on screen of argb image
 ** @param y y-coordinate on screen of argb image
 */
-void OsdDrawARGB(int xi, int yi, int height, int width, int pitch,
-    const uint8_t * argb, int x, int y)
+void OsdDrawARGB(int xi, int yi, int height, int width, int pitch, const uint8_t * argb, int x, int y)
 {
     // wakeup display for showing remote learning dialog
     VideoDisplayWakeup();
@@ -2902,15 +2870,11 @@ const char *CommandLineHelp(void)
 {
     return " -a device\taudio device (fe. alsa: hw:0,0 oss: /dev/dsp)\n"
         " -p device\taudio device for pass-through (hw:0,1 or /dev/dsp1)\n"
-        " -c channel\taudio mixer channel name (fe. PCM)\n"
-        " -d display\tdisplay of x11 server (fe. :0.0)\n"
+        " -c channel\taudio mixer channel name (fe. PCM)\n" " -d display\tdisplay of x11 server (fe. :0.0)\n"
         " -f\t\tstart with fullscreen window (only with window manager)\n"
-        " -g geometry\tx11 window geometry wxh+x+y\n"
-        " -v device\tvideo driver device (cuvid)\n"
-        " -s\t\tstart in suspended mode\n"
-        " -x\t\tstart x11 server, with -xx try to connect, if this fails\n"
-        " -X args\tX11 server arguments (f.e. -nocursor)\n"
-        " -w workaround\tenable/disable workarounds\n"
+        " -g geometry\tx11 window geometry wxh+x+y\n" " -v device\tvideo driver device (cuvid)\n"
+        " -s\t\tstart in suspended mode\n" " -x\t\tstart x11 server, with -xx try to connect, if this fails\n"
+        " -X args\tX11 server arguments (f.e. -nocursor)\n" " -w workaround\tenable/disable workarounds\n"
         "\tno-hw-decoder\t\tdisable hw decoder, use software decoder only\n"
         "\tno-mpeg-hw-decoder\tdisable hw decoder for mpeg only\n"
         "\tstill-hw-decoder\tenable hardware decoder for still-pictures\n"
@@ -2919,8 +2883,7 @@ const char *CommandLineHelp(void)
         "\talsa-no-close-open\tdisable close open to fix alsa no sound bug\n"
         "\talsa-close-open-delay\tenable close open delay to fix no sound bug\n"
         "\tignore-repeat-pict\tdisable repeat pict message\n"
-        "\tuse-possible-defect-frames prefer faster channel switch\n"
-        " -D\t\tstart in detached mode\n";
+        "\tuse-possible-defect-frames prefer faster channel switch\n" " -D\t\tstart in detached mode\n";
 }
 
 /**
@@ -2961,8 +2924,7 @@ int ProcessArgs(int argc, char *const argv[])
             case 'g':                   // geometry
                 if (VideoSetGeometry(optarg) < 0) {
                     fprintf(stderr,
-                        _
-                        ("Bad formated geometry please use: [=][<width>{xX}<height>][{+-}<xoffset>{+-}<yoffset>]\n"));
+                        _("Bad formated geometry please use: [=][<width>{xX}<height>][{+-}<xoffset>{+-}<yoffset>]\n"));
                     return 0;
                 }
                 continue;
@@ -3004,8 +2966,7 @@ int ProcessArgs(int argc, char *const argv[])
                 } else if (!strcasecmp("use-possible-defect-frames", optarg)) {
                     CodecUsePossibleDefectFrames = 1;
                 } else {
-                    fprintf(stderr, _("Workaround '%s' unsupported\n"),
-                        optarg);
+                    fprintf(stderr, _("Workaround '%s' unsupported\n"), optarg);
                     return 0;
                 }
                 continue;
@@ -3015,8 +2976,7 @@ int ProcessArgs(int argc, char *const argv[])
                 fprintf(stderr, _("We need no long options\n"));
                 return 0;
             case ':':
-                fprintf(stderr, _("Missing argument for option '%c'\n"),
-                    optopt);
+                fprintf(stderr, _("Missing argument for option '%c'\n"), optopt);
                 return 0;
             default:
                 fprintf(stderr, _("Unknown option '%c'\n"), optopt);
@@ -3052,7 +3012,7 @@ static pid_t X11ServerPid;              ///< x11 server pid
 **
 ** @param sig signal number
 */
-static void Usr1Handler(int __attribute__ ((unused)) sig)
+static void Usr1Handler(int __attribute__((unused)) sig)
 {
     ++Usr1Signal;
 
@@ -3116,8 +3076,7 @@ static void StartXServer(void)
     usr1.sa_handler = Usr1Handler;
     sigaction(SIGUSR1, &usr1, NULL);
 
-    Debug(3, "x-setup: Starting X server '%s' '%s'\n", args[0],
-        X11ServerArguments);
+    Debug(3, "x-setup: Starting X server '%s' '%s'\n", args[0], X11ServerArguments);
     // fork
     if ((pid = fork())) {               // parent
 
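The two context lines at the top of this hunk are the interesting part: the plugin installs a SIGUSR1 handler, the signal an X server conventionally sends to its parent once it is ready to accept connections. A self-contained sketch of that setup; InstallUsr1 and the sig_atomic_t flag are illustrative assumptions (the plugin declares Usr1Signal elsewhere and may zero the struct differently).

#include <signal.h>
#include <string.h>

static volatile sig_atomic_t Usr1Signal;    // set by the handler, polled elsewhere

static void Usr1Handler(int sig)
{
    (void)sig;
    ++Usr1Signal;
}

static void InstallUsr1(void)
{
    struct sigaction usr1;

    memset(&usr1, 0, sizeof(usr1));     // no extra flags by default
    sigemptyset(&usr1.sa_mask);
    usr1.sa_handler = Usr1Handler;
    sigaction(SIGUSR1, &usr1, NULL);    // readiness notification from the spawned X server
}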
@@ -3189,12 +3148,10 @@ void SoftHdDeviceExit(void)
             kill(X11ServerPid, SIGKILL);
         } while (waittime < timeout);
         if (wpid && WIFEXITED(status)) {
-            Debug(3, "x-setup: x11 server exited (%d)\n",
-                WEXITSTATUS(status));
+            Debug(3, "x-setup: x11 server exited (%d)\n", WEXITSTATUS(status));
         }
         if (wpid && WIFSIGNALED(status)) {
-            Debug(3, "x-setup: x11 server killed (%d)\n",
-                WTERMSIG(status));
+            Debug(3, "x-setup: x11 server killed (%d)\n", WTERMSIG(status));
         }
     }
 }
@@ -3247,8 +3204,7 @@ int Start(void)
     PesInit(PesDemuxAudio);
 #endif
     Info(_("[softhddev] ready%s\n"),
-        ConfigStartSuspended ? ConfigStartSuspended ==
-        -1 ? " detached" : " suspended" : "");
+        ConfigStartSuspended ? ConfigStartSuspended == -1 ? " detached" : " suspended" : "");
 
     return ConfigStartSuspended;
 }
@@ -3281,12 +3237,10 @@ void Housekeeping(void)
         wpid = waitpid(X11ServerPid, &status, WNOHANG);
         if (wpid) {
             if (WIFEXITED(status)) {
-                Debug(3, "x-setup: x11 server exited (%d)\n",
-                    WEXITSTATUS(status));
+                Debug(3, "x-setup: x11 server exited (%d)\n", WEXITSTATUS(status));
             }
             if (WIFSIGNALED(status)) {
-                Debug(3, "x-setup: x11 server killed (%d)\n",
-                    WTERMSIG(status));
+                Debug(3, "x-setup: x11 server killed (%d)\n", WTERMSIG(status));
             }
             X11ServerPid = 0;
             // video not running
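Both SoftHdDeviceExit() and Housekeeping() use the same non-blocking waitpid() pattern that these hunks reflow. Reduced to a standalone sketch; ReapChild is a hypothetical wrapper, and the plugin itself logs through Debug() rather than printf.

#include <sys/types.h>
#include <sys/wait.h>
#include <stdio.h>

// Non-blocking check whether a child (here: the spawned X server) is gone.
// Returns 1 when the child was reaped, 0 while it is still running.
static int ReapChild(pid_t pid)
{
    int status;
    pid_t wpid;

    wpid = waitpid(pid, &status, WNOHANG);  // WNOHANG: never block the housekeeping loop
    if (wpid <= 0) {
        return 0;                       // still running (or error)
    }
    if (WIFEXITED(status)) {
        printf("child exited (%d)\n", WEXITSTATUS(status));
    }
    if (WIFSIGNALED(status)) {
        printf("child killed (%d)\n", WTERMSIG(status));
    }
    return 1;
}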
@@ -3452,8 +3406,7 @@ void ScaleVideo(int x, int y, int width, int height)
 ** @param pip_width pip window width OSD relative
 ** @param pip_height pip window height OSD relative
 */
-void PipSetPosition(int x, int y, int width, int height, int pip_x, int pip_y,
-    int pip_width, int pip_height)
+void PipSetPosition(int x, int y, int width, int height, int pip_x, int pip_y, int pip_width, int pip_height)
 {
     if (!MyVideoStream->HwDecoder) {    // video not running
         return;
@@ -3463,8 +3416,7 @@ void PipSetPosition(int x, int y, int width, int height, int pip_x, int pip_y,
     if (!PipVideoStream->HwDecoder) {   // pip not running
         return;
     }
-    VideoSetOutputPosition(PipVideoStream->HwDecoder, pip_x, pip_y, pip_width,
-        pip_height);
+    VideoSetOutputPosition(PipVideoStream->HwDecoder, pip_x, pip_y, pip_width, pip_height);
 }
 
 /**
@@ -3479,8 +3431,7 @@ void PipSetPosition(int x, int y, int width, int height, int pip_x, int pip_y,
 ** @param pip_width pip window width OSD relative
 ** @param pip_height pip window height OSD relative
 */
-void PipStart(int x, int y, int width, int height, int pip_x, int pip_y,
-    int pip_width, int pip_height)
+void PipStart(int x, int y, int width, int height, int pip_x, int pip_y, int pip_width, int pip_height)
 {
     if (!MyVideoStream->HwDecoder) {    // video not running
         return;
@@ -25,8 +25,7 @@ extern "C"
 {
 #endif
     /// C callback feed key press
-    extern void FeedKeyPress(const char *, const char *, int, int,
-        const char *);
+    extern void FeedKeyPress(const char *, const char *, int, int, const char *);
 
     /// C plugin get osd size and ascpect
     extern void GetOsdSize(int *, int *, double *);
@@ -34,8 +33,7 @@ extern "C"
     /// C plugin close osd
     extern void OsdClose(void);
     /// C plugin draw osd pixmap
-    extern void OsdDrawARGB(int, int, int, int, int, const uint8_t *, int,
-        int);
+    extern void OsdDrawARGB(int, int, int, int, int, const uint8_t *, int, int);
 
     /// C plugin play audio packet
     extern int PlayAudio(const uint8_t *, int, uint8_t);
15 video.h
@@ -62,20 +62,17 @@ extern unsigned VideoGetSurface(VideoHwDecoder *, const AVCodecContext *);
 extern void VideoReleaseSurface(VideoHwDecoder *, unsigned);
 
     /// Callback to negotiate the PixelFormat.
-extern enum AVPixelFormat Video_get_format(VideoHwDecoder *, AVCodecContext *,
-    const enum AVPixelFormat *);
+extern enum AVPixelFormat Video_get_format(VideoHwDecoder *, AVCodecContext *, const enum AVPixelFormat *);
 
     /// Render a ffmpeg frame.
-extern void VideoRenderFrame(VideoHwDecoder *, const AVCodecContext *,
-    const AVFrame *);
+extern void VideoRenderFrame(VideoHwDecoder *, const AVCodecContext *, const AVFrame *);
 
     /// Get hwaccel context for ffmpeg.
 extern void *VideoGetHwAccelContext(VideoHwDecoder *);
 
 #ifdef AVCODEC_VDPAU_H
     /// Draw vdpau render state.
-extern void VideoDrawRenderState(VideoHwDecoder *,
-    struct vdpau_render_state *);
+extern void VideoDrawRenderState(VideoHwDecoder *, struct vdpau_render_state *);
 #endif
 
 #ifdef USE_OPENGLOSD
@@ -189,8 +186,7 @@ extern void VideoSetAutoCrop(int, int, int);
 extern void VideoOsdClear(void);
 
     /// Draw an OSD ARGB image.
-extern void VideoOsdDrawARGB(int, int, int, int, int, const uint8_t *, int,
-    int);
+extern void VideoOsdDrawARGB(int, int, int, int, int, const uint8_t *, int, int);
 
     /// Get OSD size.
 extern void VideoGetOsdSize(int *, int *);
@@ -248,8 +244,9 @@ extern void SetDPMSatBlackScreen(int);
 
     /// Raise the frontend window
 extern int VideoRaiseWindow(void);
 
 #ifdef USE_OPENGLOSD
-extern void ActivateOsd(GLuint,int,int,int,int);
+extern void ActivateOsd(GLuint, int, int, int, int);
 #endif
 #if 0
 long int gettid()