mirror of https://github.com/jojo61/vdr-plugin-softhdcuvid.git synced 2023-10-10 13:37:41 +02:00

Merge branch 'master' into Switch-to-posix-compaatible-sched_yield

9000h 2019-10-28 15:51:49 +01:00 committed by GitHub
commit 1bf5a841e9
GPG Key ID: 4AEE18F83AFDEB23 (no known key found for this signature in database)
24 changed files with 10557 additions and 10663 deletions
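
Note (illustration only, not part of this commit): the target branch name refers to switching to the POSIX-specified sched_yield() from <sched.h>, presumably in place of a non-portable yield call such as the deprecated GNU extension pthread_yield(). A minimal sketch of the portable call; relax_cpu is a hypothetical name, not plugin code:

#include <sched.h>

/* Yield the processor so other runnable threads can proceed.
 * sched_yield() is defined by POSIX and portable across libcs,
 * unlike pthread_yield(), which is a GNU extension. */
static void relax_cpu(void)
{
    sched_yield();
}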

.indent.pro vendored Normal file, 37 lines added

@ -0,0 +1,37 @@
--blank-lines-before-block-comments
--blank-lines-after-declarations
--blank-lines-after-procedures
--no-blank-lines-after-commas
--braces-on-if-line
--no-blank-before-sizeof
--comment-indentation41
--declaration-comment-column41
--no-comment-delimiters-on-blank-lines
--swallow-optional-blank-lines
--dont-format-comments
--parameter-indentation4
--indent-level4
--line-comments-indentation0
--cuddle-else
--cuddle-do-while
--brace-indent0
--case-brace-indentation0
//--start-left-side-of-comments
--leave-preprocessor-space
//--continuation-indentation8
--case-indentation4
--else-endif-column0
--no-space-after-casts
--declaration-indentation1
--dont-line-up-parentheses
--no-space-after-function-call-names
--space-special-semicolon
--tab-size4
--no-tabs
--line-length119
--comment-line-length119
--honour-newlines
--dont-break-procedure-type
--break-before-boolean-operator
--continuation-indentation4
--ignore-newlines
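
For reference (not part of the commit): GNU indent reads .indent.pro from the current directory automatically, so a plain invocation such as indent file.c applies the whole profile above. A rough illustration of how a few of these options shape code (braces on the if line, cuddled else, 4-space indent); clamp_volume is a hypothetical function, not plugin code:

static int clamp_volume(int volume)
{
    if (volume < 0) {                   // --braces-on-if-line
        volume = 0;
    } else if (volume > 1000) {         // --cuddle-else, --indent-level4
        volume = 1000;
    }
    return volume;
}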

Makefile

@ -348,8 +348,6 @@ HDRS= $(wildcard *.h)
indent:
for i in $(SRCS) $(HDRS); do \
indent $$i; \
unexpand -a $$i | sed -e s/constconst/const/ > $$i.up; \
mv $$i.up $$i; \
done
video_test: video.c Makefile

audio.c, 335 changed lines

@ -260,8 +260,7 @@ static void AudioNormalizer(int16_t * samples, int count)
if (avg > 0) {
factor = ((INT16_MAX / 8) * 1000U) / (uint32_t) sqrt(avg);
// smooth normalize
AudioNormalizeFactor =
(AudioNormalizeFactor * 500 + factor * 500) / 1000;
AudioNormalizeFactor = (AudioNormalizeFactor * 500 + factor * 500) / 1000;
if (AudioNormalizeFactor < AudioMinNormalize) {
AudioNormalizeFactor = AudioMinNormalize;
}
@ -271,8 +270,8 @@ static void AudioNormalizer(int16_t * samples, int count)
} else {
factor = 1000;
}
Debug(4, "audio/noramlize: avg %8d, fac=%6.3f, norm=%6.3f\n",
avg, factor / 1000.0, AudioNormalizeFactor / 1000.0);
Debug(4, "audio/noramlize: avg %8d, fac=%6.3f, norm=%6.3f\n", avg, factor / 1000.0,
AudioNormalizeFactor / 1000.0);
}
AudioNormIndex = (AudioNormIndex + 1) % AudioNormMaxIndex;
@ -339,8 +338,7 @@ static void AudioCompressor(int16_t * samples, int count)
if (max_sample > 0) {
factor = (INT16_MAX * 1000) / max_sample;
// smooth compression (FIXME: make configurable?)
AudioCompressionFactor =
(AudioCompressionFactor * 950 + factor * 50) / 1000;
AudioCompressionFactor = (AudioCompressionFactor * 950 + factor * 50) / 1000;
if (AudioCompressionFactor > factor) {
AudioCompressionFactor = factor; // no clipping
}
@ -351,8 +349,8 @@ static void AudioCompressor(int16_t * samples, int count)
return; // silent nothing todo
}
Debug(4, "audio/compress: max %5d, fac=%6.3f, com=%6.3f\n", max_sample,
factor / 1000.0, AudioCompressionFactor / 1000.0);
Debug(4, "audio/compress: max %5d, fac=%6.3f, com=%6.3f\n", max_sample, factor / 1000.0,
AudioCompressionFactor / 1000.0);
// apply compression factor
for (i = 0; i < count / AudioBytesProSample; ++i) {
@ -460,8 +458,7 @@ static void AudioStereo2Mono(const int16_t * in, int frames, int16_t * out)
** @param frames number of frames in sample buffer
** @param out output sample buffer
*/
static void AudioSurround2Stereo(const int16_t * in, int in_chan, int frames,
int16_t * out)
static void AudioSurround2Stereo(const int16_t * in, int in_chan, int frames, int16_t * out)
{
while (frames--) {
int l;
@ -540,8 +537,7 @@ static void AudioSurround2Stereo(const int16_t * in, int in_chan, int frames,
** @param out output sample buffer
** @param out_chan nr. of output channels
*/
static void AudioUpmix(const int16_t * in, int in_chan, int frames,
int16_t * out, int out_chan)
static void AudioUpmix(const int16_t * in, int in_chan, int frames, int16_t * out, int out_chan)
{
while (frames--) {
int i;
@ -571,8 +567,7 @@ static void AudioUpmix(const int16_t * in, int in_chan, int frames,
** @param out output sample buffer
** @param out_chan nr. of output channels
*/
static void AudioResample(const int16_t * in, int in_chan, int frames,
int16_t * out, int out_chan)
static void AudioResample(const int16_t * in, int in_chan, int frames, int16_t * out, int out_chan)
{
switch (in_chan * 8 + out_chan) {
case 1 * 8 + 1:
@ -607,8 +602,7 @@ static void AudioResample(const int16_t * in, int in_chan, int frames,
break;
default:
Error("audio: unsupported %d -> %d channels resample\n", in_chan,
out_chan);
Error("audio: unsupported %d -> %d channels resample\n", in_chan, out_chan);
// play silence
memset(out, 0, frames * out_chan * AudioBytesProSample);
break;
@ -697,8 +691,7 @@ static int AudioRingAdd(unsigned sample_rate, int channels, int passthrough)
AudioRing[AudioRingWrite].PTS = INT64_C(0x8000000000000000);
RingBufferReset(AudioRing[AudioRingWrite].RingBuffer);
Debug(3, "audio: %d ring buffer prepared\n",
atomic_read(&AudioRingFilled) + 1);
Debug(3, "audio: %d ring buffer prepared\n", atomic_read(&AudioRingFilled) + 1);
atomic_inc(&AudioRingFilled);
@ -796,14 +789,12 @@ static int AlsaPlayRingbuffer(void)
if (n == -EAGAIN) {
continue;
}
Warning(_("audio/alsa: avail underrun error? '%s'\n"),
snd_strerror(n));
Warning(_("audio/alsa: avail underrun error? '%s'\n"), snd_strerror(n));
err = snd_pcm_recover(AlsaPCMHandle, n, 0);
if (err >= 0) {
continue;
}
Error(_("audio/alsa: snd_pcm_avail_update(): %s\n"),
snd_strerror(n));
Error(_("audio/alsa: snd_pcm_avail_update(): %s\n"), snd_strerror(n));
return -1;
}
avail = snd_pcm_frames_to_bytes(AlsaPCMHandle, n);
@ -812,23 +803,20 @@ static int AlsaPlayRingbuffer(void)
// happens with broken alsa drivers
if (AudioThread) {
if (!AudioAlsaDriverBroken) {
Error(_("audio/alsa: broken driver %d state '%s'\n"),
avail,
Error(_("audio/alsa: broken driver %d state '%s'\n"), avail,
snd_pcm_state_name(snd_pcm_state(AlsaPCMHandle)));
}
// try to recover
if (snd_pcm_state(AlsaPCMHandle)
== SND_PCM_STATE_PREPARED) {
if ((err = snd_pcm_start(AlsaPCMHandle)) < 0) {
Error(_("audio/alsa: snd_pcm_start(): %s\n"),
snd_strerror(err));
Error(_("audio/alsa: snd_pcm_start(): %s\n"), snd_strerror(err));
}
}
usleep(5 * 1000);
}
}
Debug(4, "audio/alsa: break state '%s'\n",
snd_pcm_state_name(snd_pcm_state(AlsaPCMHandle)));
Debug(4, "audio/alsa: break state '%s'\n", snd_pcm_state_name(snd_pcm_state(AlsaPCMHandle)));
break;
}
@ -849,8 +837,7 @@ static int AlsaPlayRingbuffer(void)
break;
}
// muting pass-through AC-3, can produce disturbance
if (AudioMute || (AudioSoftVolume
&& !AudioRing[AudioRingRead].Passthrough)) {
if (AudioMute || (AudioSoftVolume && !AudioRing[AudioRingRead].Passthrough)) {
// FIXME: quick&dirty cast
AudioSoftAmplifier((int16_t *) p, avail);
// FIXME: if not all are written, we double amplify them
@ -879,14 +866,12 @@ static int AlsaPlayRingbuffer(void)
goto again;
}
*/
Warning(_("audio/alsa: writei underrun error? '%s'\n"),
snd_strerror(err));
Warning(_("audio/alsa: writei underrun error? '%s'\n"), snd_strerror(err));
err = snd_pcm_recover(AlsaPCMHandle, err, 0);
if (err >= 0) {
continue;
}
Error(_("audio/alsa: snd_pcm_writei failed: %s\n"),
snd_strerror(err));
Error(_("audio/alsa: snd_pcm_writei failed: %s\n"), snd_strerror(err));
return -1;
}
// this could happen, if underrun happened
@ -898,7 +883,6 @@ static int AlsaPlayRingbuffer(void)
RingBufferReadAdvance(AudioRing[AudioRingRead].RingBuffer, avail);
first = 0;
}
return 0;
}
@ -955,8 +939,7 @@ static int AlsaThread(void)
}
// wait for space in kernel buffers
if ((err = snd_pcm_wait(AlsaPCMHandle, 24)) < 0) {
Warning(_("audio/alsa: wait underrun error? '%s'\n"),
snd_strerror(err));
Warning(_("audio/alsa: wait underrun error? '%s'\n"), snd_strerror(err));
err = snd_pcm_recover(AlsaPCMHandle, err, 0);
if (err >= 0) {
continue;
@ -980,8 +963,7 @@ static int AlsaThread(void)
state = snd_pcm_state(AlsaPCMHandle);
if (state != SND_PCM_STATE_RUNNING) {
Debug(3, "audio/alsa: stopping play '%s'\n",
snd_pcm_state_name(state));
Debug(3, "audio/alsa: stopping play '%s'\n", snd_pcm_state_name(state));
return 0;
}
@ -1035,8 +1017,7 @@ static snd_pcm_t *AlsaOpenPCM(int passthrough)
#endif
}
// open none blocking; if device is already used, we don't want wait
if ((err =
snd_pcm_open(&handle, device, SND_PCM_STREAM_PLAYBACK,SND_PCM_NONBLOCK)) < 0) {
if ((err = snd_pcm_open(&handle, device, SND_PCM_STREAM_PLAYBACK, SND_PCM_NONBLOCK)) < 0) {
Error(_("audio/alsa: playback open '%s' error: %s\n"), device, snd_strerror(err));
return NULL;
}
@ -1065,8 +1046,7 @@ static void AlsaInitPCM(void)
snd_pcm_hw_params_alloca(&hw_params);
// choose all parameters
if ((err = snd_pcm_hw_params_any(handle, hw_params)) < 0) {
Error(_("audio: snd_pcm_hw_params_any: no configurations available: %s\n"),
snd_strerror(err));
Error(_("audio: snd_pcm_hw_params_any: no configurations available: %s\n"), snd_strerror(err));
}
AlsaCanPause = snd_pcm_hw_params_can_pause(hw_params);
Info(_("audio/alsa: supports pause: %s\n"), AlsaCanPause ? "yes" : "no");
@ -1119,8 +1099,7 @@ static void AlsaInitMixer(void)
Debug(3, "audio/alsa: mixer %s - %s open\n", device, channel);
snd_mixer_open(&alsa_mixer, 0);
if (alsa_mixer && snd_mixer_attach(alsa_mixer, device) >= 0
&& snd_mixer_selem_register(alsa_mixer, NULL, NULL) >= 0
&& snd_mixer_load(alsa_mixer) >= 0) {
&& snd_mixer_selem_register(alsa_mixer, NULL, NULL) >= 0 && snd_mixer_load(alsa_mixer) >= 0) {
const char *const alsa_mixer_elem_name = channel;
@ -1130,11 +1109,10 @@ static void AlsaInitMixer(void)
name = snd_mixer_selem_get_name(alsa_mixer_elem);
if (!strcasecmp(name, alsa_mixer_elem_name)) {
snd_mixer_selem_get_playback_volume_range(alsa_mixer_elem,
&alsa_mixer_elem_min, &alsa_mixer_elem_max);
snd_mixer_selem_get_playback_volume_range(alsa_mixer_elem, &alsa_mixer_elem_min, &alsa_mixer_elem_max);
AlsaRatio = 1000 * (alsa_mixer_elem_max - alsa_mixer_elem_min);
Debug(3, "audio/alsa: PCM mixer found %ld - %ld ratio %d\n",
alsa_mixer_elem_min, alsa_mixer_elem_max, AlsaRatio);
Debug(3, "audio/alsa: PCM mixer found %ld - %ld ratio %d\n", alsa_mixer_elem_min, alsa_mixer_elem_max,
AlsaRatio);
break;
}
@ -1236,16 +1214,14 @@ static int AlsaSetup(int *freq, int *channels, int passthrough)
for (;;) {
if ((err =
snd_pcm_set_params(AlsaPCMHandle, SND_PCM_FORMAT_S16,
AlsaUseMmap ? SND_PCM_ACCESS_MMAP_INTERLEAVED :
SND_PCM_ACCESS_RW_INTERLEAVED, *channels, *freq, 1,
AlsaUseMmap ? SND_PCM_ACCESS_MMAP_INTERLEAVED : SND_PCM_ACCESS_RW_INTERLEAVED, *channels, *freq, 1,
96 * 1000))) {
// try reduced buffer size (needed for sunxi)
// FIXME: alternativ make this configurable
if ((err =
snd_pcm_set_params(AlsaPCMHandle, SND_PCM_FORMAT_S16,
AlsaUseMmap ? SND_PCM_ACCESS_MMAP_INTERLEAVED :
SND_PCM_ACCESS_RW_INTERLEAVED, *channels, *freq, 1,
72 * 1000))) {
AlsaUseMmap ? SND_PCM_ACCESS_MMAP_INTERLEAVED : SND_PCM_ACCESS_RW_INTERLEAVED, *channels,
*freq, 1, 72 * 1000))) {
/*
if ( err == -EBADFD ) {
@ -1256,8 +1232,7 @@ static int AlsaSetup(int *freq, int *channels, int passthrough)
*/
if (!AudioDoingInit) {
Error(_("audio/alsa: set params error: %s\n"),
snd_strerror(err));
Error(_("audio/alsa: set params error: %s\n"), snd_strerror(err));
}
// FIXME: must stop sound, AudioChannels ... invalid
return -1;
@ -1274,41 +1249,30 @@ static int AlsaSetup(int *freq, int *channels, int passthrough)
snd_pcm_sw_params_alloca(&sw_params);
err = snd_pcm_sw_params_current(AlsaPCMHandle, sw_params);
if (err < 0) {
Error(_("audio: snd_pcm_sw_params_current failed: %s\n"),
snd_strerror(err));
Error(_("audio: snd_pcm_sw_params_current failed: %s\n"), snd_strerror(err));
}
if ((err = snd_pcm_sw_params_get_boundary(sw_params, &boundary)) < 0) {
Error(_("audio: snd_pcm_sw_params_get_boundary failed: %s\n"),
snd_strerror(err));
Error(_("audio: snd_pcm_sw_params_get_boundary failed: %s\n"), snd_strerror(err));
}
Debug(4, "audio/alsa: boundary %lu frames\n", boundary);
if ((err =
snd_pcm_sw_params_set_stop_threshold(AlsaPCMHandle, sw_params,
boundary)) < 0) {
Error(_("audio: snd_pcm_sw_params_set_silence_size failed: %s\n"),
snd_strerror(err));
if ((err = snd_pcm_sw_params_set_stop_threshold(AlsaPCMHandle, sw_params, boundary)) < 0) {
Error(_("audio: snd_pcm_sw_params_set_silence_size failed: %s\n"), snd_strerror(err));
}
if ((err =
snd_pcm_sw_params_set_silence_size(AlsaPCMHandle, sw_params,
boundary)) < 0) {
Error(_("audio: snd_pcm_sw_params_set_silence_size failed: %s\n"),
snd_strerror(err));
if ((err = snd_pcm_sw_params_set_silence_size(AlsaPCMHandle, sw_params, boundary)) < 0) {
Error(_("audio: snd_pcm_sw_params_set_silence_size failed: %s\n"), snd_strerror(err));
}
if ((err = snd_pcm_sw_params(AlsaPCMHandle, sw_params)) < 0) {
Error(_("audio: snd_pcm_sw_params failed: %s\n"),
snd_strerror(err));
Error(_("audio: snd_pcm_sw_params failed: %s\n"), snd_strerror(err));
}
}
// update buffer
snd_pcm_get_params(AlsaPCMHandle, &buffer_size, &period_size);
Debug(3, "audio/alsa: buffer size %lu %zdms, period size %lu %zdms\n",
buffer_size, snd_pcm_frames_to_bytes(AlsaPCMHandle,
buffer_size) * 1000 / (*freq * *channels * AudioBytesProSample),
Debug(3, "audio/alsa: buffer size %lu %zdms, period size %lu %zdms\n", buffer_size,
snd_pcm_frames_to_bytes(AlsaPCMHandle, buffer_size) * 1000 / (*freq * *channels * AudioBytesProSample),
period_size, snd_pcm_frames_to_bytes(AlsaPCMHandle,
period_size) * 1000 / (*freq * *channels * AudioBytesProSample));
Debug(3, "audio/alsa: state %s\n",
snd_pcm_state_name(snd_pcm_state(AlsaPCMHandle)));
Debug(3, "audio/alsa: state %s\n", snd_pcm_state_name(snd_pcm_state(AlsaPCMHandle)));
AudioStartThreshold = snd_pcm_frames_to_bytes(AlsaPCMHandle, period_size);
// buffer time/delay in ms
@ -1316,8 +1280,7 @@ static int AlsaSetup(int *freq, int *channels, int passthrough)
if (VideoAudioDelay > 0) {
delay += VideoAudioDelay / 90;
}
if (AudioStartThreshold <
(*freq * *channels * AudioBytesProSample * delay) / 1000U) {
if (AudioStartThreshold < (*freq * *channels * AudioBytesProSample * delay) / 1000U) {
AudioStartThreshold = (*freq * *channels * AudioBytesProSample * delay) / 1000U;
}
// no bigger, than 1/3 the buffer
@ -1474,8 +1437,7 @@ static int OssPlayRingbuffer(void)
int n;
if (ioctl(OssPcmFildes, SNDCTL_DSP_GETOSPACE, &bi) == -1) {
Error(_("audio/oss: ioctl(SNDCTL_DSP_GETOSPACE): %s\n"),
strerror(errno));
Error(_("audio/oss: ioctl(SNDCTL_DSP_GETOSPACE): %s\n"), strerror(errno));
return -1;
}
Debug(4, "audio/oss: %d bytes free\n", bi.bytes);
@ -1529,8 +1491,7 @@ static void OssFlushBuffers(void)
if (OssPcmFildes != -1) {
// flush kernel buffers
if (ioctl(OssPcmFildes, SNDCTL_DSP_HALT_OUTPUT, NULL) < 0) {
Error(_("audio/oss: ioctl(SNDCTL_DSP_HALT_OUTPUT): %s\n"),
strerror(errno));
Error(_("audio/oss: ioctl(SNDCTL_DSP_HALT_OUTPUT): %s\n"), strerror(errno));
}
}
}
@ -1614,13 +1575,11 @@ static int OssOpenPCM(int passthrough)
device = "/dev/dsp";
}
if (!AudioDoingInit) {
Info(_("audio/oss: using %sdevice '%s'\n"),
passthrough ? "pass-through " : "", device);
Info(_("audio/oss: using %sdevice '%s'\n"), passthrough ? "pass-through " : "", device);
}
if ((fildes = open(device, O_WRONLY)) < 0) {
Error(_("audio/oss: can't open dsp device '%s': %s\n"), device,
strerror(errno));
Error(_("audio/oss: can't open dsp device '%s': %s\n"), device, strerror(errno));
return -1;
}
return fildes;
@ -1666,8 +1625,7 @@ static void OssSetVolume(int volume)
/**
** Mixer channel name table.
*/
static const char *OssMixerChannelNames[SOUND_MIXER_NRDEVICES] =
SOUND_DEVICE_NAMES;
static const char *OssMixerChannelNames[SOUND_MIXER_NRDEVICES] = SOUND_DEVICE_NAMES;
/**
** Initialize OSS mixer.
@ -1693,14 +1651,12 @@ static void OssInitMixer(void)
Debug(3, "audio/oss: mixer %s - %s open\n", device, channel);
if ((fildes = open(device, O_RDWR)) < 0) {
Error(_("audio/oss: can't open mixer device '%s': %s\n"), device,
strerror(errno));
Error(_("audio/oss: can't open mixer device '%s': %s\n"), device, strerror(errno));
return;
}
// search channel name
if (ioctl(fildes, SOUND_MIXER_READ_DEVMASK, &devmask) < 0) {
Error(_("audio/oss: ioctl(SOUND_MIXER_READ_DEVMASK): %s\n"),
strerror(errno));
Error(_("audio/oss: ioctl(SOUND_MIXER_READ_DEVMASK): %s\n"), strerror(errno));
close(fildes);
return;
}
@ -1744,8 +1700,7 @@ static int64_t OssGetDelay(void)
// delay in bytes in kernel buffers
delay = -1;
if (ioctl(OssPcmFildes, SNDCTL_DSP_GETODELAY, &delay) == -1) {
Error(_("audio/oss: ioctl(SNDCTL_DSP_GETODELAY): %s\n"),
strerror(errno));
Error(_("audio/oss: ioctl(SNDCTL_DSP_GETODELAY): %s\n"), strerror(errno));
return 0L;
}
if (delay < 0) {
@ -1753,8 +1708,7 @@ static int64_t OssGetDelay(void)
}
pts = ((int64_t) delay * 90 * 1000)
/ (AudioRing[AudioRingRead].HwSampleRate *
AudioRing[AudioRingRead].HwChannels * AudioBytesProSample);
/ (AudioRing[AudioRingRead].HwSampleRate * AudioRing[AudioRingRead].HwChannels * AudioBytesProSample);
return pts;
}
@ -1810,13 +1764,11 @@ static int OssSetup(int *sample_rate, int *channels, int passthrough)
tmp = *channels;
if (ioctl(OssPcmFildes, SNDCTL_DSP_CHANNELS, &tmp) == -1) {
Error(_("audio/oss: ioctl(SNDCTL_DSP_CHANNELS): %s\n"),
strerror(errno));
Error(_("audio/oss: ioctl(SNDCTL_DSP_CHANNELS): %s\n"), strerror(errno));
return -1;
}
if (tmp != *channels) {
Warning(_("audio/oss: device doesn't support %d channels.\n"),
*channels);
Warning(_("audio/oss: device doesn't support %d channels.\n"), *channels);
*channels = tmp;
ret = 1;
}
@ -1827,8 +1779,7 @@ static int OssSetup(int *sample_rate, int *channels, int passthrough)
return -1;
}
if (tmp != *sample_rate) {
Warning(_("audio/oss: device doesn't support %dHz sample rate.\n"),
*sample_rate);
Warning(_("audio/oss: device doesn't support %dHz sample rate.\n"), *sample_rate);
*sample_rate = tmp;
ret = 1;
}
@ -1842,8 +1793,7 @@ static int OssSetup(int *sample_rate, int *channels, int passthrough)
#endif
if (ioctl(OssPcmFildes, SNDCTL_DSP_GETOSPACE, &bi) == -1) {
Error(_("audio/oss: ioctl(SNDCTL_DSP_GETOSPACE): %s\n"),
strerror(errno));
Error(_("audio/oss: ioctl(SNDCTL_DSP_GETOSPACE): %s\n"), strerror(errno));
bi.fragsize = 4096;
bi.fragstotal = 16;
} else {
@ -1853,10 +1803,9 @@ static int OssSetup(int *sample_rate, int *channels, int passthrough)
OssFragmentTime = (bi.fragsize * 1000)
/ (*sample_rate * *channels * AudioBytesProSample);
Debug(3, "audio/oss: buffer size %d %dms, fragment size %d %dms\n",
bi.fragsize * bi.fragstotal, (bi.fragsize * bi.fragstotal * 1000)
/ (*sample_rate * *channels * AudioBytesProSample), bi.fragsize,
OssFragmentTime);
Debug(3, "audio/oss: buffer size %d %dms, fragment size %d %dms\n", bi.fragsize * bi.fragstotal,
(bi.fragsize * bi.fragstotal * 1000)
/ (*sample_rate * *channels * AudioBytesProSample), bi.fragsize, OssFragmentTime);
// start when enough bytes for initial write
AudioStartThreshold = (bi.fragsize - 1) * bi.fragstotal;
@ -1866,10 +1815,8 @@ static int OssSetup(int *sample_rate, int *channels, int passthrough)
if (VideoAudioDelay > 0) {
delay += VideoAudioDelay / 90;
}
if (AudioStartThreshold <
(*sample_rate * *channels * AudioBytesProSample * delay) / 1000U) {
AudioStartThreshold =
(*sample_rate * *channels * AudioBytesProSample * delay) / 1000U;
if (AudioStartThreshold < (*sample_rate * *channels * AudioBytesProSample * delay) / 1000U) {
AudioStartThreshold = (*sample_rate * *channels * AudioBytesProSample * delay) / 1000U;
}
// no bigger, than 1/3 the buffer
if (AudioStartThreshold > AudioRingBufferSize / 3) {
@ -2025,8 +1972,7 @@ static int AudioNextRing(void)
sample_rate = AudioRing[AudioRingRead].HwSampleRate;
channels = AudioRing[AudioRingRead].HwChannels;
if (AudioUsedModule->Setup(&sample_rate, &channels, passthrough)) {
Error(_("audio: can't set channels %d sample-rate %dHz\n"), channels,
sample_rate);
Error(_("audio: can't set channels %d sample-rate %dHz\n"), channels, sample_rate);
// FIXME: handle error
AudioRing[AudioRingRead].HwSampleRate = 0;
AudioRing[AudioRingRead].InSampleRate = 0;
@ -2039,13 +1985,11 @@ static int AudioNextRing(void)
Debug(3, "audio: a/v next buf(%d,%4zdms)\n", atomic_read(&AudioRingFilled),
(RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer) * 1000)
/ (AudioRing[AudioRingWrite].HwSampleRate *
AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));
/ (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));
// stop, if not enough in next buffer
used = RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer);
if (AudioStartThreshold * 10 < used || (AudioVideoIsReady
&& AudioStartThreshold < used)) {
if (AudioStartThreshold * 10 < used || (AudioVideoIsReady && AudioStartThreshold < used)) {
return 0;
}
return 1;
@ -2077,10 +2021,8 @@ static void *AudioPlayHandlerThread(void *dummy)
pthread_mutex_unlock(&AudioMutex);
Debug(3, "audio: ----> %dms start\n", (AudioUsedBytes() * 1000)
/ (!AudioRing[AudioRingWrite].HwSampleRate +
!AudioRing[AudioRingWrite].HwChannels +
AudioRing[AudioRingWrite].HwSampleRate *
AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));
/ (!AudioRing[AudioRingWrite].HwSampleRate + !AudioRing[AudioRingWrite].HwChannels +
AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));
do {
int filled;
@ -2150,12 +2092,10 @@ static void *AudioPlayHandlerThread(void *dummy)
passthrough = AudioRing[AudioRingRead].Passthrough;
sample_rate = AudioRing[AudioRingRead].HwSampleRate;
channels = AudioRing[AudioRingRead].HwChannels;
Debug(3, "audio: thread channels %d frequency %dHz %s\n",
channels, sample_rate, passthrough ? "pass-through" : "");
Debug(3, "audio: thread channels %d frequency %dHz %s\n", channels, sample_rate,
passthrough ? "pass-through" : "");
// audio config changed?
if (old_passthrough != passthrough
|| old_sample_rate != sample_rate
|| old_channels != channels) {
if (old_passthrough != passthrough || old_sample_rate != sample_rate || old_channels != channels) {
// FIXME: wait for buffer drain
if (AudioNextRing()) {
Debug(3, "audio: HandlerThread break on nextring");
@ -2228,16 +2168,19 @@ static const AudioModule *AudioModules[] = {
&NoopModule,
};
void AudioDelayms(int delayms) {
void AudioDelayms(int delayms)
{
int count;
unsigned char *p;
#ifdef DEBUG
printf("Try Delay Audio for %d ms Samplerate %d Channels %d bps %d\n",
delayms,AudioRing[AudioRingWrite].HwSampleRate,AudioRing[AudioRingWrite].HwChannels,AudioBytesProSample);
printf("Try Delay Audio for %d ms Samplerate %d Channels %d bps %d\n", delayms,
AudioRing[AudioRingWrite].HwSampleRate, AudioRing[AudioRingWrite].HwChannels, AudioBytesProSample);
#endif
count = delayms * AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample / 1000;
count =
delayms * AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample /
1000;
if (delayms < 5000 && delayms > 0) { // not more than 5seconds
p = calloc(1, count);
@ -2279,37 +2222,28 @@ void AudioEnqueue(const void *samples, int count)
}
// audio sample modification allowed and needed?
buffer = (void *)samples;
if (!AudioRing[AudioRingWrite].Passthrough && (AudioCompression
|| AudioNormalize
|| AudioRing[AudioRingWrite].InChannels !=
AudioRing[AudioRingWrite].HwChannels)) {
if (!AudioRing[AudioRingWrite].Passthrough && (AudioCompression || AudioNormalize
|| AudioRing[AudioRingWrite].InChannels != AudioRing[AudioRingWrite].HwChannels)) {
int frames;
// resample into ring-buffer is too complex in the case of a roundabout
// just use a temporary buffer
frames =
count / (AudioRing[AudioRingWrite].InChannels *
AudioBytesProSample);
buffer =
alloca(frames * AudioRing[AudioRingWrite].HwChannels *
AudioBytesProSample);
frames = count / (AudioRing[AudioRingWrite].InChannels * AudioBytesProSample);
buffer = alloca(frames * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample);
#ifdef USE_AUDIO_MIXER
// Convert / resample input to hardware format
AudioResample(samples, AudioRing[AudioRingWrite].InChannels, frames,
buffer, AudioRing[AudioRingWrite].HwChannels);
AudioResample(samples, AudioRing[AudioRingWrite].InChannels, frames, buffer,
AudioRing[AudioRingWrite].HwChannels);
#else
#ifdef DEBUG
if (AudioRing[AudioRingWrite].InChannels !=
AudioRing[AudioRingWrite].HwChannels) {
if (AudioRing[AudioRingWrite].InChannels != AudioRing[AudioRingWrite].HwChannels) {
Debug(3, "audio: internal failure channels mismatch\n");
return;
}
#endif
memcpy(buffer, samples, count);
#endif
count =
frames * AudioRing[AudioRingWrite].HwChannels *
AudioBytesProSample;
count = frames * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample;
if (AudioCompression) { // in place operation
AudioCompressor(buffer, count);
@ -2336,11 +2270,9 @@ void AudioEnqueue(const void *samples, int count)
// FIXME: round to packet size
Debug(4, "audio: start? %4zdms skip %dms\n", (n * 1000)
/ (AudioRing[AudioRingWrite].HwSampleRate *
AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),
/ (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),
(skip * 1000)
/ (AudioRing[AudioRingWrite].HwSampleRate *
AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));
/ (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample));
if (skip) {
if (n < (unsigned)skip) {
@ -2365,8 +2297,7 @@ void AudioEnqueue(const void *samples, int count)
// Update audio clock (stupid gcc developers thinks INT64_C is unsigned)
if (AudioRing[AudioRingWrite].PTS != (int64_t) INT64_C(0x8000000000000000)) {
AudioRing[AudioRingWrite].PTS += ((int64_t) count * 90 * 1000)
/ (AudioRing[AudioRingWrite].HwSampleRate *
AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample);
/ (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample);
}
}
@ -2385,10 +2316,8 @@ void AudioVideoReady(int64_t pts)
return;
}
// no valid audio known
if (!AudioRing[AudioRingWrite].HwSampleRate
|| !AudioRing[AudioRingWrite].HwChannels
|| AudioRing[AudioRingWrite].PTS ==
(int64_t) INT64_C(0x8000000000000000)) {
if (!AudioRing[AudioRingWrite].HwSampleRate || !AudioRing[AudioRingWrite].HwChannels
|| AudioRing[AudioRingWrite].PTS == (int64_t) INT64_C(0x8000000000000000)) {
Debug(3, "audio: a/v start, no valid audio\n");
AudioVideoIsReady = 1;
return;
@ -2398,26 +2327,22 @@ void AudioVideoReady(int64_t pts)
used = RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer);
audio_pts =
AudioRing[AudioRingWrite].PTS -
(used * 90 * 1000) / (AudioRing[AudioRingWrite].HwSampleRate *
AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample);
(used * 90 * 1000) / (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels *
AudioBytesProSample);
Debug(3, "audio: a/v sync buf(%d,%4zdms) %s | %s = %dms %s\n",
atomic_read(&AudioRingFilled),
(used * 1000) / (AudioRing[AudioRingWrite].HwSampleRate *
AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),
Timestamp2String(pts), Timestamp2String(audio_pts),
(int)(pts - audio_pts) / 90, AudioRunning ? "running" : "ready");
Debug(3, "audio: a/v sync buf(%d,%4zdms) %s | %s = %dms %s\n", atomic_read(&AudioRingFilled),
(used * 1000) / (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels *
AudioBytesProSample), Timestamp2String(pts), Timestamp2String(audio_pts), (int)(pts - audio_pts) / 90,
AudioRunning ? "running" : "ready");
if (!AudioRunning) {
int skip;
// buffer ~15 video frames
// FIXME: HDTV can use smaller video buffer
skip =
pts - 15 * 20 * 90 - AudioBufferTime * 90 - audio_pts + VideoAudioDelay;
skip = pts - 15 * 20 * 90 - AudioBufferTime * 90 - audio_pts + VideoAudioDelay;
#ifdef DEBUG
fprintf(stderr, "%dms %dms %dms\n", (int)(pts - audio_pts) / 90,
VideoAudioDelay / 90, skip / 90);
fprintf(stderr, "%dms %dms %dms\n", (int)(pts - audio_pts) / 90, VideoAudioDelay / 90, skip / 90);
#endif
// guard against old PTS
if (skip > 0 && skip < 4000 * 90) {
@ -2429,14 +2354,12 @@ void AudioVideoReady(int64_t pts)
skip = used;
}
Debug(3, "audio: sync advance %dms %d/%zd\n",
(skip * 1000) / (AudioRing[AudioRingWrite].HwSampleRate *
AudioRing[AudioRingWrite].HwChannels *
(skip * 1000) / (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels *
AudioBytesProSample), skip, used);
RingBufferReadAdvance(AudioRing[AudioRingWrite].RingBuffer, skip);
used = RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer);
}
else {
} else {
Debug(3, "No audio skip -> should skip %d\n", skip / 90);
}
// FIXME: skip<0 we need bigger audio buffer
@ -2451,21 +2374,16 @@ void AudioVideoReady(int64_t pts)
AudioVideoIsReady = 1;
#if 0
if (AudioRing[AudioRingWrite].HwSampleRate
&& AudioRing[AudioRingWrite].HwChannels) {
if (AudioRing[AudioRingWrite].HwSampleRate && AudioRing[AudioRingWrite].HwChannels) {
if (pts != (int64_t) INT64_C(0x8000000000000000)
&& AudioRing[AudioRingWrite].PTS !=
(int64_t) INT64_C(0x8000000000000000)) {
Debug(3, "audio: a/v %d %s\n",
(int)(pts - AudioRing[AudioRingWrite].PTS) / 90,
&& AudioRing[AudioRingWrite].PTS != (int64_t) INT64_C(0x8000000000000000)) {
Debug(3, "audio: a/v %d %s\n", (int)(pts - AudioRing[AudioRingWrite].PTS) / 90,
AudioRunning ? "running" : "stopped");
}
Debug(3, "audio: start %4zdms %s|%s video ready\n",
(RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer) * 1000)
/ (AudioRing[AudioRingWrite].HwSampleRate *
AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),
Timestamp2String(pts),
Timestamp2String(AudioRing[AudioRingWrite].PTS));
/ (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels * AudioBytesProSample),
Timestamp2String(pts), Timestamp2String(AudioRing[AudioRingWrite].PTS));
if (!AudioRunning) {
size_t used;
@ -2475,13 +2393,10 @@ void AudioVideoReady(int64_t pts)
if (AudioStartThreshold < used) {
// too much audio buffered, skip it
if (AudioStartThreshold < used) {
Debug(3, "audio: start %4zdms skip video ready\n",
((used - AudioStartThreshold) * 1000)
/ (AudioRing[AudioRingWrite].HwSampleRate *
AudioRing[AudioRingWrite].HwChannels *
Debug(3, "audio: start %4zdms skip video ready\n", ((used - AudioStartThreshold) * 1000)
/ (AudioRing[AudioRingWrite].HwSampleRate * AudioRing[AudioRingWrite].HwChannels *
AudioBytesProSample));
RingBufferReadAdvance(AudioRing[AudioRingWrite].RingBuffer,
used - AudioStartThreshold);
RingBufferReadAdvance(AudioRing[AudioRingWrite].RingBuffer, used - AudioStartThreshold);
}
AudioRunning = 1;
pthread_cond_signal(&AudioStartCond);
@ -2562,8 +2477,7 @@ void AudioPoller(void)
*/
int AudioFreeBytes(void)
{
return AudioRing[AudioRingWrite].RingBuffer ?
RingBufferFreeBytes(AudioRing[AudioRingWrite].RingBuffer)
return AudioRing[AudioRingWrite].RingBuffer ? RingBufferFreeBytes(AudioRing[AudioRingWrite].RingBuffer)
: INT32_MAX;
}
@ -2573,8 +2487,7 @@ int AudioFreeBytes(void)
int AudioUsedBytes(void)
{
// FIXME: not correct, if multiple buffer are in use
return AudioRing[AudioRingWrite].RingBuffer ?
RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer) : 0;
return AudioRing[AudioRingWrite].RingBuffer ? RingBufferUsedBytes(AudioRing[AudioRingWrite].RingBuffer) : 0;
}
/**
@ -2597,10 +2510,10 @@ int64_t AudioGetDelay(void)
}
pts = AudioUsedModule->GetDelay();
pts += ((int64_t) RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer)
* 90 * 1000) / (AudioRing[AudioRingRead].HwSampleRate *
AudioRing[AudioRingRead].HwChannels * AudioBytesProSample);
Debug(4, "audio: hw+sw delay %zd %" PRId64 "ms\n",
RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer), pts / 90);
* 90 * 1000) / (AudioRing[AudioRingRead].HwSampleRate * AudioRing[AudioRingRead].HwChannels *
AudioBytesProSample);
Debug(4, "audio: hw+sw delay %zd %" PRId64 "ms\n", RingBufferUsedBytes(AudioRing[AudioRingRead].RingBuffer),
pts / 90);
return pts;
}
@ -2613,8 +2526,7 @@ int64_t AudioGetDelay(void)
void AudioSetClock(int64_t pts)
{
if (AudioRing[AudioRingWrite].PTS != pts) {
Debug(4, "audio: set clock %s -> %s pts\n",
Timestamp2String(AudioRing[AudioRingWrite].PTS),
Debug(4, "audio: set clock %s -> %s pts\n", Timestamp2String(AudioRing[AudioRingWrite].PTS),
Timestamp2String(pts));
}
AudioRing[AudioRingWrite].PTS = pts;
@ -2652,8 +2564,7 @@ void AudioSetVolume(int volume)
AudioVolume = volume;
AudioMute = !volume;
// reduce loudness for stereo output
if (AudioStereoDescent && AudioRing[AudioRingRead].InChannels == 2
&& !AudioRing[AudioRingRead].Passthrough) {
if (AudioStereoDescent && AudioRing[AudioRingRead].InChannels == 2 && !AudioRing[AudioRingRead].Passthrough) {
volume -= AudioStereoDescent;
if (volume < 0) {
volume = 0;
@ -2682,8 +2593,7 @@ void AudioSetVolume(int volume)
*/
int AudioSetup(int *freq, int *channels, int passthrough)
{
Debug(3, "audio: setup channels %d frequency %dHz %s\n", *channels, *freq,
passthrough ? "pass-through" : "");
Debug(3, "audio: setup channels %d frequency %dHz %s\n", *channels, *freq, passthrough ? "pass-through" : "");
// invalid parameter
if (!freq || !channels || !*freq || !*channels) {
@ -3023,12 +2933,9 @@ void AudioInit(void)
}
}
for (u = 0; u < AudioRatesMax; ++u) {
Info(_("audio: %6dHz supports %d %d %d %d %d %d %d %d channels\n"),
AudioRatesTable[u], AudioChannelMatrix[u][1],
AudioChannelMatrix[u][2], AudioChannelMatrix[u][3],
AudioChannelMatrix[u][4], AudioChannelMatrix[u][5],
AudioChannelMatrix[u][6], AudioChannelMatrix[u][7],
AudioChannelMatrix[u][8]);
Info(_("audio: %6dHz supports %d %d %d %d %d %d %d %d channels\n"), AudioRatesTable[u],
AudioChannelMatrix[u][1], AudioChannelMatrix[u][2], AudioChannelMatrix[u][3], AudioChannelMatrix[u][4],
AudioChannelMatrix[u][5], AudioChannelMatrix[u][6], AudioChannelMatrix[u][7], AudioChannelMatrix[u][8]);
}
#ifdef USE_AUDIO_THREAD
if (AudioUsedModule->Thread) { // supports threads
@ -3101,8 +3008,7 @@ static void PrintVersion(void)
#ifdef GIT_REV
"(GIT-" GIT_REV ")"
#endif
",\n\t(c) 2009 - 2013 by Johns\n"
"\tLicense AGPLv3: GNU Affero General Public License version 3\n");
",\n\t(c) 2009 - 2013 by Johns\n" "\tLicense AGPLv3: GNU Affero General Public License version 3\n");
}
/**
@ -3110,8 +3016,7 @@ static void PrintVersion(void)
*/
static void PrintUsage(void)
{
printf("Usage: audio_test [-?dhv]\n"
"\t-d\tenable debug, more -d increase the verbosity\n"
printf("Usage: audio_test [-?dhv]\n" "\t-d\tenable debug, more -d increase the verbosity\n"
"\t-? -h\tdisplay this message\n" "\t-v\tdisplay version information\n"
"Only idiots print usage on stderr!\n");
}
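
Aside (illustration only): the AudioNormalizer/AudioCompressor hunks above keep their gain factors in 1/1000 units and smooth them with an integer weighted average, e.g. (old * 950 + new * 50) / 1000. A minimal sketch of that pattern; smooth_factor is a hypothetical helper, not plugin code:

#include <stdint.h>

/* Fixed-point smoothing in per-mille units: keep_promille parts of the
 * old factor plus (1000 - keep_promille) parts of the new measurement. */
static uint32_t smooth_factor(uint32_t old_factor, uint32_t measured, uint32_t keep_promille)
{
    return (old_factor * keep_promille + measured * (1000 - keep_promille)) / 1000;
}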

codec.c, 241 changed lines

@ -100,6 +100,7 @@ static pthread_mutex_t CodecLockMutex;
/// Flag prefer fast channel switch
char CodecUsePossibleDefectFrames;
AVBufferRef *hw_device_ctx;
//----------------------------------------------------------------------------
// Video
//----------------------------------------------------------------------------
@ -137,11 +138,11 @@ struct _video_decoder_
** valid format, the formats are ordered by
** quality.
*/
static enum AVPixelFormat Codec_get_format(AVCodecContext * video_ctx,
const enum AVPixelFormat *fmt)
static enum AVPixelFormat Codec_get_format(AVCodecContext * video_ctx, const enum AVPixelFormat *fmt)
{
VideoDecoder *decoder;
enum AVPixelFormat fmt1;
decoder = video_ctx->opaque;
// bug in ffmpeg 1.1.1, called with zero width or height
@ -152,7 +153,6 @@ static enum AVPixelFormat Codec_get_format(AVCodecContext * video_ctx,
// decoder->GetFormatDone = 1;
return Video_get_format(decoder->HwDecoder, video_ctx, fmt);
}
//static void Codec_free_buffer(void *opaque, uint8_t *data);
@ -181,9 +181,8 @@ static int Codec_get_buffer2(AVCodecContext * video_ctx, AVFrame * frame, int fl
Codec_get_format(video_ctx, fmts);
}
#if 0
if (decoder->hwaccel_get_buffer && (AV_PIX_FMT_VDPAU == decoder->hwaccel_pix_fmt ||
AV_PIX_FMT_CUDA == decoder->hwaccel_pix_fmt ||
AV_PIX_FMT_VAAPI == decoder->hwaccel_pix_fmt)) {
if (decoder->hwaccel_get_buffer && (AV_PIX_FMT_VDPAU == decoder->hwaccel_pix_fmt
|| AV_PIX_FMT_CUDA == decoder->hwaccel_pix_fmt || AV_PIX_FMT_VAAPI == decoder->hwaccel_pix_fmt)) {
//Debug(3,"hwaccel get_buffer\n");
return decoder->hwaccel_get_buffer(video_ctx, frame, flags);
}
@ -225,7 +224,6 @@ void CodecVideoDelDecoder(VideoDecoder * decoder)
free(decoder);
}
/**
** Open video decoder.
**
@ -280,7 +278,6 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
}
decoder->VideoCtx->hw_device_ctx = av_buffer_ref(HwDeviceContext);
// FIXME: for software decoder use all cpus, otherwise 1
decoder->VideoCtx->thread_count = 1;
@ -289,7 +286,6 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
decoder->VideoCtx->framerate.num = 50;
decoder->VideoCtx->framerate.den = 1;
pthread_mutex_lock(&CodecLockMutex);
// open codec
#ifdef YADIF
@ -325,8 +321,6 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
decoder->VideoCtx->thread_safe_callbacks = 0;
#endif
#ifdef CUVID
if (strcmp(decoder->VideoCodec->long_name, "Nvidia CUVID MPEG2VIDEO decoder") == 0) { // deinterlace for mpeg2 is somehow broken
if (av_opt_set_int(decoder->VideoCtx->priv_data, "deint", deint, 0) < 0) { // adaptive
@ -343,8 +337,7 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
pthread_mutex_unlock(&CodecLockMutex);
Fatal(_("codec: can't set option drop 2.field to video codec!\n"));
}
}
else if (strstr(decoder->VideoCodec->long_name,"Nvidia CUVID") != NULL) {
} else if (strstr(decoder->VideoCodec->long_name, "Nvidia CUVID") != NULL) {
if (av_opt_set_int(decoder->VideoCtx->priv_data, "deint", deint, 0) < 0) { // adaptive
pthread_mutex_unlock(&CodecLockMutex);
Fatal(_("codec: can't set option deint to video codec!\n"));
@ -384,7 +377,6 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
decoder->VideoCtx->draw_horiz_band = NULL;
decoder->VideoCtx->hwaccel_context = VideoGetHwAccelContext(decoder->HwDecoder);
//
// Prepare frame buffer for decoder
//
@ -401,7 +393,6 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
#endif
}
/**
** Close video decoder.
**
@ -410,6 +401,7 @@ void CodecVideoOpen(VideoDecoder * decoder, int codec_id)
void CodecVideoClose(VideoDecoder * video_decoder)
{
AVFrame *frame;
// FIXME: play buffered data
// av_frame_free(&video_decoder->Frame); // callee does checks
@ -454,9 +446,8 @@ void DisplayPts(AVCodecContext * video_ctx, AVFrame * frame)
}
ms_delay = (1000 * video_ctx->time_base.num) / video_ctx->time_base.den;
ms_delay += frame->repeat_pict * ms_delay / 2;
printf("codec: PTS %s%s %" PRId64 " %d %d/%d %d/%d %dms\n",
frame->repeat_pict ? "r" : " ", frame->interlaced_frame ? "I" : " ",
pts, (int)(pts - last_pts) / 90, video_ctx->time_base.num,
printf("codec: PTS %s%s %" PRId64 " %d %d/%d %d/%d %dms\n", frame->repeat_pict ? "r" : " ",
frame->interlaced_frame ? "I" : " ", pts, (int)(pts - last_pts) / 90, video_ctx->time_base.num,
video_ctx->time_base.den, video_ctx->framerate.num, video_ctx->framerate.den, ms_delay);
if (pts != (int64_t) AV_NOPTS_VALUE) {
@ -473,6 +464,7 @@ void DisplayPts(AVCodecContext * video_ctx, AVFrame * frame)
** @param avpkt video packet
*/
extern int CuvidTestSurfaces();
#ifdef YADIF
extern int init_filters(AVCodecContext * dec_ctx, void *decoder, AVFrame * frame);
extern int push_filters(AVCodecContext * dec_ctx, void *decoder, AVFrame * frame);
@ -506,8 +498,7 @@ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
if (init_filters(video_ctx, decoder->HwDecoder, frame) < 0) {
Debug(3, "video: Init of VAAPI deint Filter failed\n");
decoder->filter = 0;
}
else {
} else {
Debug(3, "Init VAAPI deint ok\n");
decoder->filter = 2;
}
@ -518,8 +509,7 @@ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
}
}
VideoRenderFrame(decoder->HwDecoder, video_ctx, frame);
}
else {
} else {
av_frame_free(&frame);
}
}
@ -530,9 +520,7 @@ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
{
AVCodecContext *video_ctx;
AVFrame *frame
;
AVFrame *frame;
int ret, ret1;
int got_frame;
int consumed = 0;
@ -566,8 +554,7 @@ next_part:
ret = avcodec_receive_frame(video_ctx, frame); // get new frame
if (ret >= 0) { // one is avail.
got_frame = 1;
}
else {
} else {
got_frame = 0;
}
// printf("got %s packet from decoder\n",got_frame?"1":"no");
@ -578,8 +565,7 @@ next_part:
if (init_filters(video_ctx, decoder->HwDecoder, frame) < 0) {
Fatal(_("video: Init of YADIF Filter failed\n"));
decoder->filter = 0;
}
else {
} else {
Debug(3, "Init YADIF ok\n");
decoder->filter = 2;
}
@ -757,8 +743,7 @@ void CodecAudioOpen(AudioDecoder * audio_decoder, int codec_id)
{
AVCodec *audio_codec;
Debug(3, "codec: using audio codec ID %#06x (%s)\n", codec_id,
avcodec_get_name(codec_id));
Debug(3, "codec: using audio codec ID %#06x (%s)\n", codec_id, avcodec_get_name(codec_id));
if (!(audio_codec = avcodec_find_decoder(codec_id))) {
// if (!(audio_codec = avcodec_find_decoder(codec_id))) {
Fatal(_("codec: codec ID %#06x not found\n"), codec_id);
@ -771,8 +756,7 @@ void CodecAudioOpen(AudioDecoder * audio_decoder, int codec_id)
}
if (CodecDownmix) {
audio_decoder->AudioCtx->request_channel_layout =
AV_CH_LAYOUT_STEREO_DOWNMIX;
audio_decoder->AudioCtx->request_channel_layout = AV_CH_LAYOUT_STEREO_DOWNMIX;
}
pthread_mutex_lock(&CodecLockMutex);
// open codec
@ -793,7 +777,6 @@ void CodecAudioOpen(AudioDecoder * audio_decoder, int codec_id)
pthread_mutex_unlock(&CodecLockMutex);
Debug(3, "codec: audio '%s'\n", audio_decoder->AudioCodec->long_name);
audio_decoder->SampleRate = 0;
audio_decoder->Channels = 0;
audio_decoder->HwSampleRate = 0;
@ -954,19 +937,16 @@ static void CodecReorderAudioFrame(int16_t * buf, int size, int channels)
** @param audio_decoder audio decoder data
** @param[out] passthrough pass-through output
*/
static int CodecAudioUpdateHelper(AudioDecoder * audio_decoder,
int *passthrough)
static int CodecAudioUpdateHelper(AudioDecoder * audio_decoder, int *passthrough)
{
const AVCodecContext *audio_ctx;
int err;
audio_ctx = audio_decoder->AudioCtx;
Debug(3, "codec/audio: format change %s %dHz *%d channels%s%s%s%s%s\n",
av_get_sample_fmt_name(audio_ctx->sample_fmt), audio_ctx->sample_rate,
audio_ctx->channels, CodecPassthrough & CodecPCM ? " PCM" : "",
CodecPassthrough & CodecMPA ? " MPA" : "",
CodecPassthrough & CodecAC3 ? " AC-3" : "",
CodecPassthrough & CodecEAC3 ? " E-AC-3" : "",
av_get_sample_fmt_name(audio_ctx->sample_fmt), audio_ctx->sample_rate, audio_ctx->channels,
CodecPassthrough & CodecPCM ? " PCM" : "", CodecPassthrough & CodecMPA ? " MPA" : "",
CodecPassthrough & CodecAC3 ? " AC-3" : "", CodecPassthrough & CodecEAC3 ? " E-AC-3" : "",
CodecPassthrough ? " pass-through" : "");
*passthrough = 0;
@ -978,8 +958,7 @@ static int CodecAudioUpdateHelper(AudioDecoder * audio_decoder,
// SPDIF/HDMI pass-through
if ((CodecPassthrough & CodecAC3 && audio_ctx->codec_id == AV_CODEC_ID_AC3)
|| (CodecPassthrough & CodecEAC3
&& audio_ctx->codec_id == AV_CODEC_ID_EAC3)) {
|| (CodecPassthrough & CodecEAC3 && audio_ctx->codec_id == AV_CODEC_ID_EAC3)) {
if (audio_ctx->codec_id == AV_CODEC_ID_EAC3) {
// E-AC-3 over HDMI some receivers need HBR
audio_decoder->HwSampleRate *= 4;
@ -990,16 +969,12 @@ static int CodecAudioUpdateHelper(AudioDecoder * audio_decoder,
*passthrough = 1;
}
// channels/sample-rate not support?
if ((err =
AudioSetup(&audio_decoder->HwSampleRate,
&audio_decoder->HwChannels, *passthrough))) {
if ((err = AudioSetup(&audio_decoder->HwSampleRate, &audio_decoder->HwChannels, *passthrough))) {
// try E-AC-3 none HBR
audio_decoder->HwSampleRate /= 4;
if (audio_ctx->codec_id != AV_CODEC_ID_EAC3
|| (err =
AudioSetup(&audio_decoder->HwSampleRate,
&audio_decoder->HwChannels, *passthrough))) {
|| (err = AudioSetup(&audio_decoder->HwSampleRate, &audio_decoder->HwChannels, *passthrough))) {
Debug(3, "codec/audio: audio setup error\n");
// FIXME: handle errors
@ -1009,9 +984,8 @@ static int CodecAudioUpdateHelper(AudioDecoder * audio_decoder,
}
}
Debug(3, "codec/audio: resample %s %dHz *%d -> %s %dHz *%d\n",
av_get_sample_fmt_name(audio_ctx->sample_fmt), audio_ctx->sample_rate,
audio_ctx->channels, av_get_sample_fmt_name(AV_SAMPLE_FMT_S16),
Debug(3, "codec/audio: resample %s %dHz *%d -> %s %dHz *%d\n", av_get_sample_fmt_name(audio_ctx->sample_fmt),
audio_ctx->sample_rate, audio_ctx->channels, av_get_sample_fmt_name(AV_SAMPLE_FMT_S16),
audio_decoder->HwSampleRate, audio_decoder->HwChannels);
return 0;
@ -1023,8 +997,7 @@ static int CodecAudioUpdateHelper(AudioDecoder * audio_decoder,
** @param audio_decoder audio decoder data
** @param avpkt undecoded audio packet
*/
static int CodecAudioPassthroughHelper(AudioDecoder * audio_decoder,
const AVPacket * avpkt)
static int CodecAudioPassthroughHelper(AudioDecoder * audio_decoder, const AVPacket * avpkt)
{
#ifdef USE_PASSTHROUGH
const AVCodecContext *audio_ctx;
@ -1045,12 +1018,10 @@ static int CodecAudioPassthroughHelper(AudioDecoder * audio_decoder,
int x;
x = (audio_decoder->DriftFrac +
(audio_decoder->DriftCorr * spdif_sz)) / (10 *
audio_decoder->HwSampleRate * 100);
(audio_decoder->DriftCorr * spdif_sz)) / (10 * audio_decoder->HwSampleRate * 100);
audio_decoder->DriftFrac =
(audio_decoder->DriftFrac +
(audio_decoder->DriftCorr * spdif_sz)) % (10 *
audio_decoder->HwSampleRate * 100);
(audio_decoder->DriftCorr * spdif_sz)) % (10 * audio_decoder->HwSampleRate * 100);
// round to word border
x *= audio_decoder->HwChannels * 4;
if (x < -64) { // limit correction
@ -1081,8 +1052,7 @@ static int CodecAudioPassthroughHelper(AudioDecoder * audio_decoder,
AudioEnqueue(spdif, spdif_sz);
return 1;
}
if (CodecPassthrough & CodecEAC3
&& audio_ctx->codec_id == AV_CODEC_ID_EAC3) {
if (CodecPassthrough & CodecEAC3 && audio_ctx->codec_id == AV_CODEC_ID_EAC3) {
uint16_t *spdif;
int spdif_sz;
int repeat;
@ -1121,8 +1091,7 @@ static int CodecAudioPassthroughHelper(AudioDecoder * audio_decoder,
spdif[1] = htole16(0x4E1F);
spdif[2] = htole16(IEC61937_EAC3);
spdif[3] = htole16(audio_decoder->SpdifIndex * 8);
memset(spdif + 4 + audio_decoder->SpdifIndex / 2, 0,
spdif_sz - 8 - audio_decoder->SpdifIndex);
memset(spdif + 4 + audio_decoder->SpdifIndex / 2, 0, spdif_sz - 8 - audio_decoder->SpdifIndex);
// don't play with the eac-3 samples
AudioEnqueue(spdif, spdif_sz);
@ -1165,8 +1134,7 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
audio_decoder->LastDelay = delay;
audio_decoder->Drift = 0;
audio_decoder->DriftFrac = 0;
Debug(3, "codec/audio: inital drift delay %" PRId64 "ms\n",
delay / 90);
Debug(3, "codec/audio: inital drift delay %" PRId64 "ms\n", delay / 90);
return;
}
// collect over some time
@ -1176,12 +1144,9 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
}
tim_diff = (nowtime.tv_sec - audio_decoder->LastTime.tv_sec)
* 1000 * 1000 * 1000 + (nowtime.tv_nsec -
audio_decoder->LastTime.tv_nsec);
* 1000 * 1000 * 1000 + (nowtime.tv_nsec - audio_decoder->LastTime.tv_nsec);
drift =
(tim_diff * 90) / (1000 * 1000) - pts_diff + delay -
audio_decoder->LastDelay;
drift = (tim_diff * 90) / (1000 * 1000) - pts_diff + delay - audio_decoder->LastDelay;
// adjust rounding error
nowtime.tv_nsec -= nowtime.tv_nsec % (1000 * 1000 / 90);
@ -1190,16 +1155,13 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
audio_decoder->LastDelay = delay;
if (0) {
Debug(3,
"codec/audio: interval P:%5" PRId64 "ms T:%5" PRId64 "ms D:%4"
PRId64 "ms %f %d\n", pts_diff / 90, tim_diff / (1000 * 1000),
delay / 90, drift / 90.0, audio_decoder->DriftCorr);
Debug(3, "codec/audio: interval P:%5" PRId64 "ms T:%5" PRId64 "ms D:%4" PRId64 "ms %f %d\n", pts_diff / 90,
tim_diff / (1000 * 1000), delay / 90, drift / 90.0, audio_decoder->DriftCorr);
}
// underruns and av_resample have the same time :(((
if (abs(drift) > 10 * 90) {
// drift too big, pts changed?
Debug(3, "codec/audio: drift(%6d) %3dms reset\n",
audio_decoder->DriftCorr, drift / 90);
Debug(3, "codec/audio: drift(%6d) %3dms reset\n", audio_decoder->DriftCorr, drift / 90);
audio_decoder->LastDelay = 0;
#ifdef DEBUG
corr = 0; // keep gcc happy
@ -1233,15 +1195,13 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
} else {
distance = (pts_diff * audio_decoder->HwSampleRate) / (90 * 1000);
}
av_resample_compensate(audio_decoder->AvResample,
audio_decoder->DriftCorr / 10, distance);
av_resample_compensate(audio_decoder->AvResample, audio_decoder->DriftCorr / 10, distance);
}
if (1) {
static int c;
if (!(c++ % 10)) {
Debug(3, "codec/audio: drift(%6d) %8dus %5d\n",
audio_decoder->DriftCorr, drift * 1000 / 90, corr);
Debug(3, "codec/audio: drift(%6d) %8dus %5d\n", audio_decoder->DriftCorr, drift * 1000 / 90, corr);
}
}
}
@ -1272,16 +1232,13 @@ static void CodecAudioUpdateFormat(AudioDecoder * audio_decoder)
audio_ctx = audio_decoder->AudioCtx;
if ((err = CodecAudioUpdateHelper(audio_decoder, &passthrough))) {
Debug(3, "codec/audio: resample %dHz *%d -> %dHz *%d err %d\n",
audio_ctx->sample_rate, audio_ctx->channels,
Debug(3, "codec/audio: resample %dHz *%d -> %dHz *%d err %d\n", audio_ctx->sample_rate, audio_ctx->channels,
audio_decoder->HwSampleRate, audio_decoder->HwChannels, err);
if (err == 1) {
audio_decoder->ReSample =
av_audio_resample_init(audio_decoder->HwChannels,
audio_ctx->channels, audio_decoder->HwSampleRate,
audio_ctx->sample_rate, audio_ctx->sample_fmt,
audio_ctx->sample_fmt, 16, 10, 0, 0.8);
av_audio_resample_init(audio_decoder->HwChannels, audio_ctx->channels, audio_decoder->HwSampleRate,
audio_ctx->sample_rate, audio_ctx->sample_fmt, audio_ctx->sample_fmt, 16, 10, 0, 0.8);
// libav-0.8_pre didn't support 6 -> 2 channels
if (!audio_decoder->ReSample) {
Error(_("codec/audio: resample setup error\n"));
@ -1306,16 +1263,14 @@ static void CodecAudioUpdateFormat(AudioDecoder * audio_decoder)
Error(_("codec/audio: overwrite resample\n"));
}
audio_decoder->AvResample =
av_resample_init(audio_decoder->HwSampleRate,
audio_decoder->HwSampleRate, 16, 10, 0, 0.8);
av_resample_init(audio_decoder->HwSampleRate, audio_decoder->HwSampleRate, 16, 10, 0, 0.8);
if (!audio_decoder->AvResample) {
Error(_("codec/audio: AvResample setup error\n"));
} else {
// reset drift to some default value
audio_decoder->DriftCorr /= 2;
audio_decoder->DriftFrac = 0;
av_resample_compensate(audio_decoder->AvResample,
audio_decoder->DriftCorr / 10,
av_resample_compensate(audio_decoder->AvResample, audio_decoder->DriftCorr / 10,
10 * audio_decoder->HwSampleRate);
}
}
@ -1333,8 +1288,8 @@ void CodecAudioEnqueue(AudioDecoder * audio_decoder, int16_t * data, int count)
{
#ifdef USE_AUDIO_DRIFT_CORRECTION
if ((CodecAudioDrift & CORRECT_PCM) && audio_decoder->AvResample) {
int16_t buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 +
AV_INPUT_BUFFER_PADDING_SIZE] __attribute__ ((aligned(16)));
int16_t buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 + AV_INPUT_BUFFER_PADDING_SIZE]
__attribute__((aligned(16)));
int16_t buftmp[MAX_CHANNELS][(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4];
int consumed;
int i;
@ -1347,15 +1302,12 @@ void CodecAudioEnqueue(AudioDecoder * audio_decoder, int16_t * data, int count)
if (audio_decoder->RemainCount + bytes_n > audio_decoder->BufferSize) {
audio_decoder->BufferSize = audio_decoder->RemainCount + bytes_n;
for (ch = 0; ch < MAX_CHANNELS; ++ch) {
audio_decoder->Buffer[ch] =
realloc(audio_decoder->Buffer[ch],
audio_decoder->BufferSize);
audio_decoder->Buffer[ch] = realloc(audio_decoder->Buffer[ch], audio_decoder->BufferSize);
}
}
// copy remaining bytes into sample buffer
for (ch = 0; ch < audio_decoder->HwChannels; ++ch) {
memcpy(audio_decoder->Buffer[ch], audio_decoder->Remain[ch],
audio_decoder->RemainCount);
memcpy(audio_decoder->Buffer[ch], audio_decoder->Remain[ch], audio_decoder->RemainCount);
}
// deinterleave samples into sample buffer
for (i = 0; i < bytes_n / 2; i++) {
@ -1369,18 +1321,14 @@ void CodecAudioEnqueue(AudioDecoder * audio_decoder, int16_t * data, int count)
n = 0; // keep gcc lucky
// resample the sample buffer into tmp buffer
for (ch = 0; ch < audio_decoder->HwChannels; ++ch) {
n = av_resample(audio_decoder->AvResample, buftmp[ch],
audio_decoder->Buffer[ch], &consumed, bytes_n / 2,
n = av_resample(audio_decoder->AvResample, buftmp[ch], audio_decoder->Buffer[ch], &consumed, bytes_n / 2,
sizeof(buftmp[ch]) / 2, ch == audio_decoder->HwChannels - 1);
// fixme remaining channels
if (bytes_n - consumed * 2 > audio_decoder->RemainSize) {
audio_decoder->RemainSize = bytes_n - consumed * 2;
}
audio_decoder->Remain[ch] =
realloc(audio_decoder->Remain[ch], audio_decoder->RemainSize);
memcpy(audio_decoder->Remain[ch],
audio_decoder->Buffer[ch] + consumed,
audio_decoder->RemainSize);
audio_decoder->Remain[ch] = realloc(audio_decoder->Remain[ch], audio_decoder->RemainSize);
memcpy(audio_decoder->Remain[ch], audio_decoder->Buffer[ch] + consumed, audio_decoder->RemainSize);
audio_decoder->RemainCount = audio_decoder->RemainSize;
}
@ -1406,9 +1354,7 @@ void CodecAudioEnqueue(AudioDecoder * audio_decoder, int16_t * data, int count)
AudioEnqueue(data, count);
}
int myavcodec_decode_audio3(AVCodecContext *avctx, int16_t *samples,
int *frame_size_ptr,
AVPacket *avpkt)
int myavcodec_decode_audio3(AVCodecContext * avctx, int16_t * samples, int *frame_size_ptr, AVPacket * avpkt)
{
AVFrame *frame = av_frame_alloc();
int ret, got_frame = 0;
@ -1433,18 +1379,17 @@ int myavcodec_decode_audio3(AVCodecContext *avctx, int16_t *samples,
ret = avcodec_send_packet(avctx, avpkt);
if (ret == AVERROR(EAGAIN))
ret = 0;
else if (ret < 0)
{
else if (ret < 0) {
// Debug(3, "codec/audio: audio decode error: %1 (%2)\n",av_make_error_string(error, sizeof(error), ret),got_frame);
return ret;
}
else
} else
ret = avpkt->size;
#endif
if (ret >= 0 && got_frame) {
int i, ch;
int planar = av_sample_fmt_is_planar(avctx->sample_fmt);
int data_size = av_get_bytes_per_sample(avctx->sample_fmt);
if (data_size < 0) {
/* This should not occur, checking just for paranoia */
fprintf(stderr, "Failed to calculate data size\n");
@ -1465,7 +1410,6 @@ int myavcodec_decode_audio3(AVCodecContext *avctx, int16_t *samples,
return ret;
}
/**
** Decode an audio packet.
**
@ -1476,8 +1420,7 @@ int myavcodec_decode_audio3(AVCodecContext *avctx, int16_t *samples,
*/
void CodecAudioDecode(AudioDecoder * audio_decoder, const AVPacket * avpkt)
{
int16_t buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 +
AV_INPUT_BUFFER_PADDING_SIZE] __attribute__ ((aligned(16)));
int16_t buf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 + AV_INPUT_BUFFER_PADDING_SIZE] __attribute__((aligned(16)));
int buf_sz;
int l;
AVCodecContext *audio_ctx;
@ -1503,8 +1446,7 @@ void CodecAudioDecode(AudioDecoder * audio_decoder, const AVPacket * avpkt)
CodecAudioSetClock(audio_decoder, avpkt->pts);
}
// FIXME: must first play remainings bytes, than change and play new.
if (audio_decoder->Passthrough != CodecPassthrough
|| audio_decoder->SampleRate != audio_ctx->sample_rate
if (audio_decoder->Passthrough != CodecPassthrough || audio_decoder->SampleRate != audio_ctx->sample_rate
|| audio_decoder->Channels != audio_ctx->channels) {
CodecAudioUpdateFormat(audio_decoder);
}
@ -1512,8 +1454,7 @@ void CodecAudioDecode(AudioDecoder * audio_decoder, const AVPacket * avpkt)
if (audio_decoder->HwSampleRate && audio_decoder->HwChannels) {
// need to resample audio
if (audio_decoder->ReSample) {
int16_t outbuf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 +
AV_INPUT_BUFFER_PADDING_SIZE]
int16_t outbuf[(AVCODEC_MAX_AUDIO_FRAME_SIZE * 3) / 4 + AV_INPUT_BUFFER_PADDING_SIZE]
__attribute__((aligned(16)));
int outlen;
@ -1526,11 +1467,8 @@ void CodecAudioDecode(AudioDecoder * audio_decoder, const AVPacket * avpkt)
#endif
if (outlen) {
// outlen seems to be wrong in ffmpeg-0.9
outlen /= audio_decoder->Channels *
av_get_bytes_per_sample(audio_ctx->sample_fmt);
outlen *=
audio_decoder->HwChannels *
av_get_bytes_per_sample(audio_ctx->sample_fmt);
outlen /= audio_decoder->Channels * av_get_bytes_per_sample(audio_ctx->sample_fmt);
outlen *= audio_decoder->HwChannels * av_get_bytes_per_sample(audio_ctx->sample_fmt);
Debug(4, "codec/audio: %d -> %d\n", buf_sz, outlen);
CodecAudioEnqueue(audio_decoder, outbuf, outlen);
}
@ -1577,8 +1515,7 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
audio_decoder->LastDelay = delay;
audio_decoder->Drift = 0;
audio_decoder->DriftFrac = 0;
Debug(3, "codec/audio: inital drift delay %" PRId64 "ms\n",
delay / 90);
Debug(3, "codec/audio: inital drift delay %" PRId64 "ms\n", delay / 90);
return;
}
// collect over some time
@ -1588,12 +1525,9 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
}
tim_diff = (nowtime.tv_sec - audio_decoder->LastTime.tv_sec)
* 1000 * 1000 * 1000 + (nowtime.tv_nsec -
audio_decoder->LastTime.tv_nsec);
* 1000 * 1000 * 1000 + (nowtime.tv_nsec - audio_decoder->LastTime.tv_nsec);
drift =
(tim_diff * 90) / (1000 * 1000) - pts_diff + delay -
audio_decoder->LastDelay;
drift = (tim_diff * 90) / (1000 * 1000) - pts_diff + delay - audio_decoder->LastDelay;
// adjust rounding error
nowtime.tv_nsec -= nowtime.tv_nsec % (1000 * 1000 / 90);
@ -1602,16 +1536,13 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
audio_decoder->LastDelay = delay;
if (0) {
Debug(3, "codec/audio: interval P:%5" PRId64 "ms T:%5" PRId64 "ms D:%4" PRId64 "ms %f %d\n", pts_diff / 90,
tim_diff / (1000 * 1000), delay / 90, drift / 90.0, audio_decoder->DriftCorr);
}
// underruns and av_resample have the same time :(((
if (abs(drift) > 10 * 90) {
// drift too big, pts changed?
Debug(3, "codec/audio: drift(%6d) %3dms reset\n",
audio_decoder->DriftCorr, drift / 90);
Debug(3, "codec/audio: drift(%6d) %3dms reset\n", audio_decoder->DriftCorr, drift / 90);
audio_decoder->LastDelay = 0;
#ifdef DEBUG
corr = 0; // keep gcc happy
@ -1646,8 +1577,7 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
} else {
distance = (pts_diff * audio_decoder->HwSampleRate) / (90 * 1000);
}
if (swr_set_compensation(audio_decoder->Resample, audio_decoder->DriftCorr / 10, distance)) {
Debug(3, "codec/audio: swr_set_compensation failed\n");
}
}
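(Editor's note: swr_set_compensation() is libswresample's "soft" compensation; it spreads a correction of sample_delta samples over the next compensation_distance output samples. A hedged usage sketch with made-up numbers, not taken from this commit:)
// distribute a 48-sample correction over roughly one second of 48 kHz output
// if (swr_set_compensation(ctx, 48, 48000) < 0) { /* compensation rejected */ }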
@ -1657,8 +1587,7 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
int distance;
distance = (pts_diff * audio_decoder->HwSampleRate) / (900 * 1000);
if (avresample_set_compensation(audio_decoder->Resample, audio_decoder->DriftCorr / 10, distance)) {
Debug(3, "codec/audio: swr_set_compensation failed\n");
}
}
@ -1667,8 +1596,7 @@ static void CodecAudioSetClock(AudioDecoder * audio_decoder, int64_t pts)
static int c;
if (!(c++ % 10)) {
Debug(3, "codec/audio: drift(%6d) %8dus %5d\n",
audio_decoder->DriftCorr, drift * 1000 / 90, corr);
Debug(3, "codec/audio: drift(%6d) %8dus %5d\n", audio_decoder->DriftCorr, drift * 1000 / 90, corr);
}
}
#else
@ -1697,8 +1625,7 @@ static void CodecAudioUpdateFormat(AudioDecoder * audio_decoder)
audio_ctx = audio_decoder->AudioCtx;
#ifdef DEBUG
if (audio_ctx->sample_fmt == AV_SAMPLE_FMT_S16 && audio_ctx->sample_rate == audio_decoder->HwSampleRate
&& !CodecAudioDrift) {
// FIXME: use Resample only, when it is needed!
fprintf(stderr, "no resample needed\n");
@ -1707,10 +1634,9 @@ static void CodecAudioUpdateFormat(AudioDecoder * audio_decoder)
#ifdef USE_SWRESAMPLE
audio_decoder->Resample =
swr_alloc_set_opts(audio_decoder->Resample, audio_ctx->channel_layout, AV_SAMPLE_FMT_S16,
audio_decoder->HwSampleRate, audio_ctx->channel_layout, audio_ctx->sample_fmt, audio_ctx->sample_rate, 0,
NULL);
if (audio_decoder->Resample) {
swr_init(audio_decoder->Resample);
} else {
@ -1723,18 +1649,12 @@ static void CodecAudioUpdateFormat(AudioDecoder * audio_decoder)
return;
}
av_opt_set_int(audio_decoder->Resample, "in_channel_layout",
audio_ctx->channel_layout, 0);
av_opt_set_int(audio_decoder->Resample, "in_sample_fmt",
audio_ctx->sample_fmt, 0);
av_opt_set_int(audio_decoder->Resample, "in_sample_rate",
audio_ctx->sample_rate, 0);
av_opt_set_int(audio_decoder->Resample, "out_channel_layout",
audio_ctx->channel_layout, 0);
av_opt_set_int(audio_decoder->Resample, "out_sample_fmt",
AV_SAMPLE_FMT_S16, 0);
av_opt_set_int(audio_decoder->Resample, "out_sample_rate",
audio_decoder->HwSampleRate, 0);
av_opt_set_int(audio_decoder->Resample, "in_channel_layout", audio_ctx->channel_layout, 0);
av_opt_set_int(audio_decoder->Resample, "in_sample_fmt", audio_ctx->sample_fmt, 0);
av_opt_set_int(audio_decoder->Resample, "in_sample_rate", audio_ctx->sample_rate, 0);
av_opt_set_int(audio_decoder->Resample, "out_channel_layout", audio_ctx->channel_layout, 0);
av_opt_set_int(audio_decoder->Resample, "out_sample_fmt", AV_SAMPLE_FMT_S16, 0);
av_opt_set_int(audio_decoder->Resample, "out_sample_rate", audio_decoder->HwSampleRate, 0);
if (avresample_open(audio_decoder->Resample)) {
avresample_free(&audio_decoder->Resample);
@ -1799,7 +1719,8 @@ void CodecAudioDecode(AudioDecoder * audio_decoder, const AVPacket * avpkt)
uint8_t *out[1];
out[0] = outbuf;
ret =
swr_convert(audio_decoder->Resample, out, sizeof(outbuf) / (2 * audio_decoder->HwChannels),
(const uint8_t **)frame->extended_data, frame->nb_samples);
if (ret > 0) {
if (!(audio_decoder->Passthrough & CodecPCM)) {

@ -35,7 +35,8 @@
#define AVCODEC_MAX_AUDIO_FRAME_SIZE 192000
enum HWAccelID
{
HWACCEL_NONE = 0,
HWACCEL_AUTO,
HWACCEL_VDPAU,
@ -48,6 +49,7 @@ enum HWAccelID {
};
extern AVBufferRef *hw_device_ctx;
///
/// Video decoder structure.
///
@ -86,8 +88,6 @@ struct _video_decoder_
// From VO
struct mp_hwdec_devices *hwdec_devs;
};
//----------------------------------------------------------------------------

@ -1,3 +1,4 @@
/*
* This file is part of mpv.
*
@ -52,7 +53,8 @@
struct GL;
typedef struct GL GL;
enum
{
MPGL_CAP_ROW_LENGTH = (1 << 4), // GL_[UN]PACK_ROW_LENGTH
MPGL_CAP_FB = (1 << 5),
MPGL_CAP_VAO = (1 << 6),
@ -78,16 +80,15 @@ enum {
#define MPGL_VER_P(ver) MPGL_VER_GET_MAJOR(ver), MPGL_VER_GET_MINOR(ver)
void mpgl_load_functions(GL * gl, void *(*getProcAddress)(const GLubyte *), const char *ext2, struct mp_log *log);
void mpgl_load_functions2(GL * gl, void *(*get_fn)(void *ctx, const char *n), void *fn_ctx, const char *ext2,
struct mp_log *log);
typedef void (GLAPIENTRY * MP_GLDEBUGPROC) (GLenum, GLenum, GLuint, GLenum, GLsizei, const GLchar *, const void *);
//function pointers loaded from the OpenGL library
struct GL
{
int version; // MPGL_VER() mangled (e.g. 210 for 2.1)
int es; // es version (e.g. 300), 0 for desktop GL
int glsl_version; // e.g. 130 for GLSL 1.30
@ -108,17 +109,12 @@ struct GL {
void (GLAPIENTRY * Flush) (void);
void (GLAPIENTRY * Finish) (void);
void (GLAPIENTRY * PixelStorei) (GLenum, GLint);
void (GLAPIENTRY * TexImage1D) (GLenum, GLint, GLint, GLsizei, GLint, GLenum, GLenum, const GLvoid *);
void (GLAPIENTRY * TexImage2D) (GLenum, GLint, GLint, GLsizei, GLsizei, GLint, GLenum, GLenum, const GLvoid *);
void (GLAPIENTRY * TexSubImage2D) (GLenum, GLint, GLint, GLint, GLsizei, GLsizei, GLenum, GLenum, const GLvoid *);
void (GLAPIENTRY * TexParameteri) (GLenum, GLenum, GLint);
void (GLAPIENTRY * GetIntegerv) (GLenum, GLint *);
void (GLAPIENTRY * ReadPixels) (GLint, GLint, GLsizei, GLsizei, GLenum, GLenum, GLvoid *);
void (GLAPIENTRY * ReadBuffer) (GLenum);
void (GLAPIENTRY * DrawArrays) (GLenum, GLint, GLsizei);
GLenum(GLAPIENTRY * GetError) (void);
@ -129,15 +125,13 @@ struct GL {
void (GLAPIENTRY * DeleteBuffers) (GLsizei, const GLuint *);
void (GLAPIENTRY * BindBuffer) (GLenum, GLuint);
void (GLAPIENTRY * BindBufferBase) (GLenum, GLuint, GLuint);
GLvoid *(GLAPIENTRY * MapBufferRange) (GLenum, GLintptr, GLsizeiptr, GLbitfield);
GLboolean(GLAPIENTRY * UnmapBuffer) (GLenum);
void (GLAPIENTRY * BufferData) (GLenum, intptr_t, const GLvoid *, GLenum);
void (GLAPIENTRY * ActiveTexture) (GLenum);
void (GLAPIENTRY * BindTexture) (GLenum, GLuint);
int (GLAPIENTRY * SwapInterval) (int);
void (GLAPIENTRY * TexImage3D) (GLenum, GLint, GLenum, GLsizei, GLsizei, GLsizei, GLint, GLenum, GLenum,
const GLvoid *);
void (GLAPIENTRY * GenVertexArrays) (GLsizei, GLuint *);
@ -145,16 +139,14 @@ struct GL {
GLint(GLAPIENTRY * GetAttribLocation) (GLuint, const GLchar *);
void (GLAPIENTRY * EnableVertexAttribArray) (GLuint);
void (GLAPIENTRY * DisableVertexAttribArray) (GLuint);
void (GLAPIENTRY * VertexAttribPointer) (GLuint, GLint, GLenum, GLboolean, GLsizei, const GLvoid *);
void (GLAPIENTRY * DeleteVertexArrays) (GLsizei, const GLuint *);
void (GLAPIENTRY * UseProgram) (GLuint);
GLint(GLAPIENTRY * GetUniformLocation) (GLuint, const GLchar *);
void (GLAPIENTRY * CompileShader) (GLuint);
GLuint(GLAPIENTRY * CreateProgram) (void);
GLuint(GLAPIENTRY * CreateShader) (GLenum);
void (GLAPIENTRY * ShaderSource) (GLuint, GLsizei, const GLchar **, const GLint *);
void (GLAPIENTRY * LinkProgram) (GLuint);
void (GLAPIENTRY * AttachShader) (GLuint, GLuint);
void (GLAPIENTRY * DeleteShader) (GLuint);
@ -169,22 +161,17 @@ struct GL {
void (GLAPIENTRY * GenFramebuffers) (GLsizei, GLuint *);
void (GLAPIENTRY * DeleteFramebuffers) (GLsizei, const GLuint *);
GLenum(GLAPIENTRY * CheckFramebufferStatus) (GLenum);
void (GLAPIENTRY * FramebufferTexture2D) (GLenum, GLenum, GLenum, GLuint, GLint);
void (GLAPIENTRY * BlitFramebuffer) (GLint, GLint, GLint, GLint, GLint, GLint, GLint, GLint, GLbitfield, GLenum);
void (GLAPIENTRY * GetFramebufferAttachmentParameteriv) (GLenum, GLenum, GLenum, GLint *);
void (GLAPIENTRY * Uniform1f) (GLint, GLfloat);
void (GLAPIENTRY * Uniform2f) (GLint, GLfloat, GLfloat);
void (GLAPIENTRY * Uniform3f) (GLint, GLfloat, GLfloat, GLfloat);
void (GLAPIENTRY * Uniform4f) (GLint, GLfloat, GLfloat, GLfloat, GLfloat);
void (GLAPIENTRY * Uniform1i) (GLint, GLint);
void (GLAPIENTRY * UniformMatrix2fv) (GLint, GLsizei, GLboolean, const GLfloat *);
void (GLAPIENTRY * UniformMatrix3fv) (GLint, GLsizei, GLboolean, const GLfloat *);
void (GLAPIENTRY * InvalidateFramebuffer) (GLenum, GLsizei, const GLenum *);
@ -216,27 +203,21 @@ struct GL {
#if HAVE_GL_WIN32
// The HANDLE type might not be present on non-Win32
BOOL(GLAPIENTRY * DXSetResourceShareHandleNV) (void *dxObject, HANDLE shareHandle);
HANDLE(GLAPIENTRY * DXOpenDeviceNV) (void *dxDevice);
BOOL(GLAPIENTRY * DXCloseDeviceNV) (HANDLE hDevice);
HANDLE(GLAPIENTRY * DXRegisterObjectNV) (HANDLE hDevice, void *dxObject, GLuint name, GLenum type, GLenum access);
BOOL(GLAPIENTRY * DXUnregisterObjectNV) (HANDLE hDevice, HANDLE hObject);
BOOL(GLAPIENTRY * DXLockObjectsNV) (HANDLE hDevice, GLint count, HANDLE * hObjects);
BOOL(GLAPIENTRY * DXUnlockObjectsNV) (HANDLE hDevice, GLint count, HANDLE * hObjects);
#endif
GLint(GLAPIENTRY * GetVideoSync) (GLuint *);
GLint(GLAPIENTRY * WaitVideoSync) (GLint, GLint, unsigned int *);
void (GLAPIENTRY * GetTranslatedShaderSourceANGLE) (GLuint, GLsizei, GLsizei *, GLchar * source);
void (GLAPIENTRY * DebugMessageCallback) (MP_GLDEBUGPROC callback, const void *userParam);
void *(GLAPIENTRY * MPGetNativeDisplay) (const char *name);
};

@ -1,3 +1,4 @@
/*
* This file is part of libplacebo.
*
@ -39,5 +40,4 @@
#define PL_HAVE_SHADERC 0
#define PL_HAVE_VULKAN 1
#endif // LIBPLACEBO_CONTEXT_H_

@ -1,3 +1,4 @@
/*
* Copyright 1993-2013 NVIDIA Corporation. All rights reserved.
*
@ -28,8 +29,8 @@ typedef struct
/**
* Error codes
*/
s_CudaErrorStr sCudaDrvErrorString[] = {
/**
* The API call returned with no errors. In the case of query calls, this
* can also mean that the operation being queried is complete (see
@ -65,21 +66,25 @@ s_CudaErrorStr sCudaDrvErrorString[] =
* in visual profiler mode.
*/
{"CUDA_ERROR_PROFILER_DISABLED", 5},
/**
* This indicates profiling has not been initialized for this context.
* Call cuProfilerInitialize() to resolve this.
*/
{"CUDA_ERROR_PROFILER_NOT_INITIALIZED", 6},
/**
* This indicates profiler has already been started and probably
* cuProfilerStart() is incorrectly called.
*/
{"CUDA_ERROR_PROFILER_ALREADY_STARTED", 7},
/**
* This indicates profiler has already been stopped and probably
* cuProfilerStop() is incorrectly called.
*/
{"CUDA_ERROR_PROFILER_ALREADY_STOPPED", 8},
/**
* This indicates that no CUDA-capable devices were detected by the installed
* CUDA driver.
@ -92,7 +97,6 @@ s_CudaErrorStr sCudaDrvErrorString[] =
*/
{"CUDA_ERROR_INVALID_DEVICE (device specified is not a valid CUDA device)", 101},
/**
* This indicates that the device kernel image is invalid. This can also
* indicate an invalid CUDA module.
@ -240,21 +244,18 @@ s_CudaErrorStr sCudaDrvErrorString[] =
*/
{"CUDA_ERROR_OPERATING_SYSTEM", 304},
/**
* This indicates that a resource handle passed to the API call was not
* valid. Resource handles are opaque types like ::CUstream and ::CUevent.
*/
{"CUDA_ERROR_INVALID_HANDLE", 400},
/**
* This indicates that a named symbol was not found. Examples of symbols
* are global/constant variable names, texture names }, and surface names.
*/
{"CUDA_ERROR_NOT_FOUND", 500},
/**
* This indicates that asynchronous operations issued previously have not
* completed yet. This result is not actually an error, but must be indicated
@ -263,7 +264,6 @@ s_CudaErrorStr sCudaDrvErrorString[] =
*/
{"CUDA_ERROR_NOT_READY", 600},
/**
* While executing a kernel, the device encountered a
* load or store instruction on an invalid memory address.
@ -419,7 +419,6 @@ s_CudaErrorStr sCudaDrvErrorString[] =
*/
{"CUDA_ERROR_COOPERATIVE_LAUNCH_TOO_LARGE", 720},
/**
* This error indicates that the attempted operation is not permitted.
*/
@ -431,7 +430,6 @@ s_CudaErrorStr sCudaDrvErrorString[] =
*/
{"CUDA_ERROR_NOT_SUPPORTED", 801},
/**
* This indicates that an unknown internal error has occurred.
*/
@ -445,9 +443,7 @@ static inline const char *getCudaDrvErrorString(CUresult error_id)
{
int index = 0;
while (sCudaDrvErrorString[index].error_id != error_id && (int)sCudaDrvErrorString[index].error_id != -1) {
index++;
}
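(Editor's usage sketch for the lookup above; the call site is hypothetical and not part of this commit:)
// CUresult res = cuInit(0);
// if (res != CUDA_SUCCESS)
//     fprintf(stderr, "cuInit failed: %s\n", getCudaDrvErrorString(res));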
@ -459,5 +455,4 @@ static inline const char *getCudaDrvErrorString(CUresult error_id)
#endif // __cuda_cuda_h__
#endif

misc.h
@ -125,9 +125,8 @@ static inline const char *Timestamp2String(int64_t ts)
return "--:--:--.---";
}
idx = (idx + 1) % 3;
snprintf(buf[idx], sizeof(buf[idx]), "%2d:%02d:%02d.%03d", (int)(ts / (90 * 3600000)),
(int)((ts / (90 * 60000)) % 60), (int)((ts / (90 * 1000)) % 60), (int)((ts / 90) % 1000));
return buf[idx];
}
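(Editor's example, assuming the 90 kHz PTS ticks used throughout the plugin:)
// Timestamp2String(450000) -> " 0:00:05.000"   (450000 ticks / 90 = 5000 ms)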
@ -153,6 +152,7 @@ static inline uint32_t GetMsTicks(void)
return (tval.tv_sec * 1000) + (tval.tv_usec / 1000);
#endif
}
static inline uint64_t GetusTicks(void)
{

@ -616,6 +616,7 @@ void cOglFb::BindWrite(void) {
}
void cOglFb::Unbind(void) {
glFinish();
glBindFramebuffer(GL_FRAMEBUFFER, 0);
glBindTexture(GL_TEXTURE_2D, 0);
}

@ -20,22 +20,18 @@
#define FT_ERRORDEF( e, v, s ) { e, s },
#define FT_ERROR_START_LIST {
#define FT_ERROR_END_LIST { 0, 0 } };
const struct
{
int code;
const char *message;
} FT_Errors[] =
#include FT_ERRORS_H
#include <memory>
#include <queue>
#include <vdr/plugin.h>
#include <vdr/osd.h>
#include <vdr/thread.h>
#include "softhddev.h"
extern "C"
{
#include <stdint.h>
@ -49,7 +45,8 @@ extern "C"
extern "C" pthread_mutex_t OSDMutex;
struct sOglImage
{
GLuint texture;
GLint width;
GLint height;
@ -65,22 +62,28 @@ void ConvertColor(const GLint &colARGB, glm::vec4 &col);
/****************************************************************************************
* cShader
****************************************************************************************/
enum eShaderType
{
stRect,
stTexture,
stText,
stCount
};
class cShader
{
private:
eShaderType type;
GLuint id;
bool Compile(const char *vertexCode, const char *fragmentCode);
bool CheckCompileErrors(GLuint object, bool program = false);
public:
cShader(void)
{
};
virtual ~ cShader(void)
{
};
bool Load(eShaderType type);
void Use(void);
void SetFloat(const GLchar * name, GLfloat value);
@ -94,9 +97,11 @@ public:
/****************************************************************************************
* cOglGlyph
****************************************************************************************/
class cOglGlyph:public cListObject
{
private:
struct tKerning
{
public:
tKerning(uint prevSym, GLfloat kerning = 0.0f) {
this->prevSym = prevSym;
@ -111,18 +116,38 @@ private:
int width;
int height;
int advanceX;
cVector < tKerning > kerningCache;
GLuint texture;
void LoadTexture(FT_BitmapGlyph ftGlyph);
public:
cOglGlyph(uint charCode, FT_BitmapGlyph ftGlyph);
virtual ~ cOglGlyph();
uint CharCode(void)
{
return charCode;
}
int AdvanceX(void)
{
return advanceX;
}
int BearingLeft(void) const
{
return bearingLeft;
}
int BearingTop(void) const
{
return bearingTop;
}
int Width(void) const
{
return width;
}
int Height(void) const
{
return height;
}
int GetKerningCache(uint prevSym);
void SetKerningCache(uint prevSym, int kerning);
void BindTexture(void);
@ -131,7 +156,8 @@ public:
/****************************************************************************************
* cOglFont
****************************************************************************************/
class cOglFont:public cListObject
{
private:
static bool initiated;
cString name;
@ -148,10 +174,22 @@ public:
virtual ~ cOglFont(void);
static cOglFont *Get(const char *name, int charHeight);
static void Cleanup(void);
const char *Name(void)
{
return *name;
};
int Size(void)
{
return size;
};
int Bottom(void)
{
return bottom;
};
int Height(void)
{
return height;
};
cOglGlyph *Glyph(uint charCode) const;
int Kerning(cOglGlyph * glyph, uint prevSym) const;
};
@ -160,7 +198,8 @@ public:
* cOglFb
* Framebuffer Object - OpenGL part of a Pixmap
****************************************************************************************/
class cOglFb
{
protected:
bool initiated;
// GLuint fb;
@ -174,7 +213,10 @@ public:
cOglFb(GLint width, GLint height, GLint viewPortWidth, GLint viewPortHeight);
virtual ~ cOglFb(void);
bool Initiated(void)
{
return initiated;
}
virtual bool Init(void);
void Bind(void);
void BindRead(void);
@ -182,18 +224,34 @@ public:
virtual void Unbind(void);
bool BindTexture(void);
void Blit(GLint destX1, GLint destY1, GLint destX2, GLint destY2);
GLint Width(void)
{
return width;
};
GLint Height(void)
{
return height;
};
bool Scrollable(void)
{
return scrollable;
};
GLint ViewportWidth(void)
{
return viewPortWidth;
};
GLint ViewportHeight(void)
{
return viewPortHeight;
};
};
/****************************************************************************************
* cOglOutputFb
* Output Framebuffer Object - holds Vdpau Output Surface which is our "output framebuffer"
****************************************************************************************/
class cOglOutputFb:public cOglFb
{
protected:
bool initiated;
private:
@ -212,7 +270,8 @@ public:
* cOglVb
* Vertex Buffer - OpenGl Vertices for the different drawing commands
****************************************************************************************/
enum eVertexBufferType
{
vbRect,
vbEllipse,
vbSlope,
@ -221,7 +280,8 @@ enum eVertexBufferType {
vbCount
};
class cOglVb
{
private:
eVertexBufferType type;
eShaderType shader;
@ -250,91 +310,141 @@ public:
/****************************************************************************************
* cOpenGLCmd
****************************************************************************************/
class cOglCmd
{
protected:
cOglFb * fb;
public:
cOglCmd(cOglFb * fb)
{
this->fb = fb;
};
virtual ~ cOglCmd(void)
{
};
virtual const char *Description(void) = 0;
virtual bool Execute(void) = 0;
};
class cOglCmdInitOutputFb:public cOglCmd
{
private:
cOglOutputFb * oFb;
public:
cOglCmdInitOutputFb(cOglOutputFb * oFb);
virtual ~ cOglCmdInitOutputFb(void)
{
};
virtual const char *Description(void)
{
return "InitOutputFramebuffer";
}
virtual bool Execute(void);
};
class cOglCmdInitFb:public cOglCmd
{
private:
cCondWait * wait;
public:
cOglCmdInitFb(cOglFb * fb, cCondWait * wait = NULL);
virtual ~ cOglCmdInitFb(void)
{
};
virtual const char *Description(void)
{
return "InitFramebuffer";
}
virtual bool Execute(void);
};
class cOglCmdDeleteFb:public cOglCmd
{
public:
cOglCmdDeleteFb(cOglFb * fb);
virtual ~ cOglCmdDeleteFb(void)
{
};
virtual const char *Description(void)
{
return "DeleteFramebuffer";
}
virtual bool Execute(void);
};
class cOglCmdRenderFbToBufferFb:public cOglCmd
{
private:
cOglFb * buffer;
GLfloat x, y;
GLfloat drawPortX, drawPortY;
GLint transparency;
public:
cOglCmdRenderFbToBufferFb(cOglFb * fb, cOglFb * buffer, GLint x, GLint y, GLint transparency, GLint drawPortX,
GLint drawPortY);
virtual ~ cOglCmdRenderFbToBufferFb(void)
{
};
virtual const char *Description(void)
{
return "Render Framebuffer to Buffer";
}
virtual bool Execute(void);
};
class cOglCmdCopyBufferToOutputFb:public cOglCmd
{
private:
cOglOutputFb * oFb;
GLint x, y;
public:
cOglCmdCopyBufferToOutputFb(cOglFb * fb, cOglOutputFb * oFb, GLint x, GLint y);
virtual ~ cOglCmdCopyBufferToOutputFb(void)
{
};
virtual const char *Description(void)
{
return "Copy buffer to OutputFramebuffer";
}
virtual bool Execute(void);
};
class cOglCmdFill:public cOglCmd
{
private:
GLint color;
public:
cOglCmdFill(cOglFb * fb, GLint color);
virtual ~ cOglCmdFill(void)
{
};
virtual const char *Description(void)
{
return "Fill";
}
virtual bool Execute(void);
};
class cOglCmdDrawRectangle:public cOglCmd
{
private:
GLint x, y;
GLint width, height;
GLint color;
public:
cOglCmdDrawRectangle(cOglFb * fb, GLint x, GLint y, GLint width, GLint height, GLint color);
virtual ~ cOglCmdDrawRectangle(void)
{
};
virtual const char *Description(void)
{
return "DrawRectangle";
}
virtual bool Execute(void);
};
class cOglCmdDrawEllipse:public cOglCmd
{
private:
GLint x, y;
GLint width, height;
@ -345,12 +455,18 @@ private:
GLfloat *CreateVerticesHalf(int &numVertices);
public:
cOglCmdDrawEllipse(cOglFb * fb, GLint x, GLint y, GLint width, GLint height, GLint color, GLint quadrants);
virtual ~ cOglCmdDrawEllipse(void)
{
};
virtual const char *Description(void)
{
return "DrawEllipse";
}
virtual bool Execute(void);
};
class cOglCmdDrawSlope:public cOglCmd
{
private:
GLint x, y;
GLint width, height;
@ -358,12 +474,18 @@ private:
GLint type;
public:
cOglCmdDrawSlope(cOglFb * fb, GLint x, GLint y, GLint width, GLint height, GLint color, GLint type);
virtual ~ cOglCmdDrawSlope(void)
{
};
virtual const char *Description(void)
{
return "DrawSlope";
}
virtual bool Execute(void);
};
class cOglCmdDrawText:public cOglCmd
{
private:
GLint x, y;
GLint limitX;
@ -372,55 +494,80 @@ private:
int fontSize;
unsigned int *symbols;
public:
cOglCmdDrawText(cOglFb * fb, GLint x, GLint y, unsigned int *symbols, GLint limitX, const char *name,
int fontSize, tColor colorText);
virtual ~ cOglCmdDrawText(void);
virtual const char *Description(void)
{
return "DrawText";
}
virtual bool Execute(void);
};
class cOglCmdDrawImage:public cOglCmd
{
private:
tColor * argb;
GLint x, y, width, height;
bool overlay;
GLfloat scaleX, scaleY;
public:
cOglCmdDrawImage(cOglFb * fb, tColor * argb, GLint width, GLint height, GLint x, GLint y, bool overlay =
true, double scaleX = 1.0f, double scaleY = 1.0f);
virtual ~ cOglCmdDrawImage(void);
virtual const char *Description(void)
{
return "Draw Image";
}
virtual bool Execute(void);
};
class cOglCmdDrawTexture:public cOglCmd
{
private:
sOglImage * imageRef;
GLint x, y;
public:
cOglCmdDrawTexture(cOglFb * fb, sOglImage * imageRef, GLint x, GLint y);
virtual ~ cOglCmdDrawTexture(void)
{
};
virtual const char *Description(void)
{
return "Draw Texture";
}
virtual bool Execute(void);
};
class cOglCmdStoreImage:public cOglCmd
{
private:
sOglImage * imageRef;
tColor *data;
public:
cOglCmdStoreImage(sOglImage * imageRef, tColor * argb);
virtual ~ cOglCmdStoreImage(void);
virtual const char *Description(void)
{
return "Store Image";
}
virtual bool Execute(void);
};
class cOglCmdDropImage:public cOglCmd
{
private:
sOglImage * imageRef;
cCondWait *wait;
public:
cOglCmdDropImage(sOglImage * imageRef, cCondWait * wait);
virtual ~ cOglCmdDropImage(void)
{
};
virtual const char *Description(void)
{
return "Drop Image";
}
virtual bool Execute(void);
};
@ -430,7 +577,8 @@ public:
#define OGL_MAX_OSDIMAGES 256
#define OGL_CMDQUEUE_SIZE 100
class cOglThread:public cThread
{
private:
cCondWait * startWait;
cCondWait *wait;
@ -459,25 +607,44 @@ public:
int StoreImage(const cImage & image);
void DropImageData(int imageHandle);
sOglImage *GetImageRef(int slot);
int MaxTextureSize(void)
{
return maxTextureSize;
};
};
/****************************************************************************************
* cOglPixmap
****************************************************************************************/
class cOglPixmap:public cPixmap
{
private:
cOglFb * fb;
std::shared_ptr < cOglThread > oglThread;
bool dirty;
public:
cOglPixmap(std::shared_ptr < cOglThread > oglThread, int Layer, const cRect & ViewPort, const cRect & DrawPort =
cRect::Null);
virtual ~ cOglPixmap(void);
cOglFb *Fb(void)
{
return fb;
};
int X(void)
{
return ViewPort().X();
};
int Y(void)
{
return ViewPort().Y();
};
virtual bool IsDirty(void)
{
return dirty;
}
virtual void SetDirty(bool dirty = true) {
this->dirty = dirty;
}
virtual void SetAlpha(int Alpha);
virtual void SetTile(bool Tile);
virtual void SetViewPort(const cRect & Rect);
@ -487,8 +654,10 @@ public:
virtual void DrawImage(const cPoint & Point, const cImage & Image);
virtual void DrawImage(const cPoint & Point, int ImageHandle);
virtual void DrawPixel(const cPoint & Point, tColor Color);
virtual void DrawBitmap(const cPoint & Point, const cBitmap & Bitmap, tColor ColorFg = 0, tColor ColorBg =
0, bool Overlay = false);
virtual void DrawText(const cPoint & Point, const char *s, tColor ColorFg, tColor ColorBg, const cFont * Font,
int Width = 0, int Height = 0, int Alignment = taDefault);
virtual void DrawRectangle(const cRect & Rect, tColor Color);
virtual void DrawEllipse(const cRect & Rect, tColor Color, int Quadrants = 0);
virtual void DrawSlope(const cRect & Rect, tColor Color, int Type);
@ -501,7 +670,8 @@ public:
/******************************************************************************
* cOglOsd
******************************************************************************/
class cOglOsd:public cOsd
{
private:
cOglFb * bFb;
std::shared_ptr < cOglThread > oglThread;
@ -515,7 +685,8 @@ public:
virtual cPixmap *CreatePixmap(int Layer, const cRect & ViewPort, const cRect & DrawPort = cRect::Null);
virtual void DestroyPixmap(cPixmap * Pixmap);
virtual void Flush(void);
virtual void DrawScaledBitmap(int x, int y, const cBitmap & Bitmap, double FactorX, double FactorY,
bool AntiAlias = false);
static cOglOutputFb *oFb;
};

@ -7,7 +7,7 @@ msgid ""
msgstr ""
"Project-Id-Version: VDR \n"
"Report-Msgid-Bugs-To: <see README>\n"
"POT-Creation-Date: 2019-10-04 14:23+0200\n"
"POT-Creation-Date: 2019-10-26 18:41+0200\n"
"PO-Revision-Date: blabla\n"
"Last-Translator: blabla\n"
"Language-Team: blabla\n"
@ -792,7 +792,13 @@ msgstr ""
msgid "[softhddev] ready%s\n"
msgstr ""
msgid "video/egl: GlxSetupWindow can't make egl context current\n"
msgid "video: can't lock thread\n"
msgstr ""
msgid "video: can't unlock thread\n"
msgstr ""
msgid "video/egl: GlxSetupWindow can't make egl/glx context current\n"
msgstr ""
msgid "video/glx: no v-sync\n"
@ -891,21 +897,6 @@ msgstr ""
msgid "Failed rendering frame!\n"
msgstr ""
#, c-format
msgid "video/vdpau: can't get video surface parameters: %s\n"
msgstr ""
msgid "video/vdpau: out of memory\n"
msgstr ""
#, c-format
msgid "video/vdpau: unsupported chroma type %d\n"
msgstr ""
#, c-format
msgid "video/vdpau: can't get video surface bits: %s\n"
msgstr ""
#, c-format
msgid "video/vdpau: output buffer full, dropping frame (%d/%d)\n"
msgstr ""
@ -924,9 +915,6 @@ msgstr ""
msgid "video: decoder buffer empty, duping frame (%d/%d) %d v-buf\n"
msgstr ""
msgid "Failed creating vulkan swapchain!"
msgstr ""
msgid "video: fatal i/o error\n"
msgstr ""
@ -934,12 +922,6 @@ msgstr ""
msgid "video/event: No symbol for %d\n"
msgstr ""
msgid "video: can't lock thread\n"
msgstr ""
msgid "video: can't unlock thread\n"
msgstr ""
msgid "Cant get memory for PLACEBO struct"
msgstr ""
@ -952,6 +934,9 @@ msgstr ""
msgid "Failed to create Vulkan Device"
msgstr ""
msgid "Failed creating vulkan swapchain!"
msgstr ""
msgid "Failed initializing libplacebo renderer\n"
msgstr ""

@ -90,7 +90,6 @@ color.rgb = pow(color.rgb, vec3(1.0/2.4));\n\
out_color = color;\n\
}\n" };
#else
char vertex_osd[] = { "\
\n\
@ -182,6 +181,7 @@ color.rgb = pow(color.rgb, vec3(1.0/2.4));\n\
out_color = color;\n\
}\n" };
#endif
/* Color conversion matrix: RGB = m * YUV + c
* m is in row-major matrix, with m[row][col], e.g.:
* [ a11 a12 a13 ] float m[3][3] = { { a11, a12, a13 },
@ -195,50 +195,53 @@ out_color = color;\n\
* is the Y vector (1, 1, 1), the 2nd is the U vector, the 3rd the V vector.
* The matrix might also be used for other conversions and colorspaces.
*/
struct mp_cmat
{
GLfloat m[3][3]; // colormatrix
GLfloat c[3]; //colormatrix_c
};
struct mp_mat
{
GLfloat m[3][3];
};
// YUV input limited range (16-235 for luma, 16-240 for chroma)
// ITU-R BT.601 (SD)
struct mp_cmat yuv_bt601 = { {{1.164384, 1.164384, 1.164384},
{0.00000, -0.391762, 2.017232},
{1.596027, -0.812968, 0.000000}},
{-0.874202, 0.531668, -1.085631}
};
// ITU-R BT.709 (HD)
struct mp_cmat yuv_bt709 = { {{1.164384, 1.164384, 1.164384},
{0.00000, -0.213249, 2.112402},
{1.792741, -0.532909, 0.000000}},
{-0.972945, 0.301483, -1.133402}
};
// ITU-R BT.2020 non-constant luminance system
struct mp_cmat yuv_bt2020ncl = { {{1.164384, 1.164384, 1.164384},
{0.00000, -0.187326, 2.141772},
{1.678674, -0.650424, 0.000000}},
{-0.915688, 0.347459, -1.148145}
};
// ITU-R BT.2020 constant luminance system
struct mp_cmat yuv_bt2020cl = { {{0.0000, 1.164384, 0.000000},
{0.00000, 0.000000, 1.138393},
{1.138393, 0.000000, 0.000000}},
{-0.571429, -0.073059, -0.571429}
};
float cms_matrix[3][3] = { {1.660497, -0.124547, -0.018154},
{-0.587657, 1.132895, -0.100597},
{-0.072840, -0.008348, 1.118751}
};
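(Editor's sketch of how the tables above are meant to be applied, assuming, as the earlier comment states, that each inner array is one column of the matrix, i.e. the Y, U and V vectors; the helper name is made up for illustration:)
static void yuv2rgb_sketch(const struct mp_cmat *cm, const float yuv[3], float rgb[3])
{
    // rgb = m * yuv + c, with m stored column by column
    for (int i = 0; i < 3; ++i) {
        rgb[i] = cm->m[0][i] * yuv[0] + cm->m[1][i] * yuv[1] + cm->m[2][i] * yuv[2] + cm->c[i];
    }
}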
struct gl_vao_entry
{
// used for shader / glBindAttribLocation
const char *name;
// glVertexAttribPointer() arguments
@ -248,18 +251,20 @@ struct gl_vao_entry {
int offset;
};
struct vertex_pt
{
float x, y;
};
struct vertex_pi
{
GLint x, y;
};
#define TEXUNIT_VIDEO_NUM 6
struct vertex
{
struct vertex_pt position;
struct vertex_pt texcoord[TEXUNIT_VIDEO_NUM];
};
@ -271,14 +276,13 @@ static const struct gl_vao_entry vertex_vao[] = {
{0}
};
static void compile_attach_shader(GLuint program, GLenum type, const char *source)
{
GLuint shader;
GLint status, log_length;
char log[4000];
GLsizei len;
shader = glCreateShader(type);
glShaderSource(shader, 1, &source, NULL);
glCompileShader(shader);
@ -306,7 +310,8 @@ static void link_shader(GLuint program)
Debug(3, "Link Status %d loglen %d\n", status, log_length);
}
static GLuint sc_generate_osd(GLuint gl_prog)
{
Debug(3, "vor create osd\n");
gl_prog = glCreateProgram();
@ -321,8 +326,8 @@ static GLuint sc_generate_osd(GLuint gl_prog) {
return gl_prog;
}
static GLuint sc_generate(GLuint gl_prog, enum AVColorSpace colorspace)
{
char vname[80];
int n;
@ -402,6 +407,7 @@ static void render_pass_quad(int flip, float xcrop, float ycrop)
struct vertex va[4];
int n;
const struct gl_vao_entry *e;
// uhhhh what a hack
if (!flip) {
va[0].position.x = (float)-1.0;
@ -440,8 +446,6 @@ static void render_pass_quad(int flip, float xcrop, float ycrop)
va[3].texcoord[1].x = (float)1.0 - xcrop;
va[3].texcoord[1].y = (float)1.0 - ycrop; // cropped from the bottom right: 1.0 - value
glBindBuffer(GL_ARRAY_BUFFER, vao_buffer);
glBufferData(GL_ARRAY_BUFFER, 4 * sizeof(struct vertex), va, GL_DYNAMIC_DRAW);
glBindBuffer(GL_ARRAY_BUFFER, 0);
@ -451,8 +455,8 @@ static void render_pass_quad(int flip, float xcrop, float ycrop)
for (n = 0; vertex_vao[n].name; n++) {
e = &vertex_vao[n];
glEnableVertexAttribArray(n);
glVertexAttribPointer(n, e->num_elems, e->type, e->normalized, sizeof(struct vertex),
(void *)(intptr_t) e->offset);
}
glBindBuffer(GL_ARRAY_BUFFER, 0);
@ -461,5 +465,3 @@ static void render_pass_quad(int flip, float xcrop, float ycrop)
for (n = 0; vertex_vao[n].name; n++)
glDisableVertexAttribArray(n);
}

@ -74,8 +74,7 @@ static const char *const VERSION = "2.1.0"
;
/// vdr-plugin description.
static const char *const DESCRIPTION = trNOOP("A software and GPU emulated UHD device");
/// vdr-plugin text of main menu entry
static const char *MAINMENUENTRY = trNOOP("SoftUHD");
@ -92,7 +91,6 @@ static const char *const Resolution[RESOLUTIONS] = {
"576i", "720p", "1080i_fake", "1080i", "UHD"
};
static char ConfigMakePrimary; ///< config primary wanted
static char ConfigHideMainMenuEntry; ///< config hide main menu entry
static char ConfigDetachFromMainMenu; ///< detach from main menu entry instead of suspend
@ -245,8 +243,7 @@ class cSoftRemote:public cRemote
** @param release released key flag
** @param letter x11 character string (system setting locale)
*/
extern "C" void FeedKeyPress(const char *keymap, const char *key, int repeat,
int release, const char *letter)
extern "C" void FeedKeyPress(const char *keymap, const char *key, int repeat, int release, const char *letter)
{
cRemote *remote;
cSoftRemote *csoft;
@ -352,8 +349,7 @@ cSoftOsd::cSoftOsd(int left, int top, uint level)
#ifdef OSD_DEBUG
/* FIXME: OsdWidth/OsdHeight not correct!
*/
dsyslog("[softhddev]%s: %dx%d%+d%+d, %d\n", __FUNCTION__, OsdWidth(),
OsdHeight(), left, top, level);
dsyslog("[softhddev]%s: %dx%d%+d%+d, %d\n", __FUNCTION__, OsdWidth(), OsdHeight(), left, top, level);
#endif
OsdLevel = level;
@ -420,8 +416,7 @@ void cSoftOsd::Flush(void)
cPixmapMemory *pm;
#ifdef OSD_DEBUG
dsyslog("[softhddev]%s: level %d active %d\n", __FUNCTION__, OsdLevel,
Active());
dsyslog("[softhddev]%s: level %d active %d\n", __FUNCTION__, OsdLevel, Active());
#endif
if (!Active()) { // this osd is not active
@ -448,8 +443,7 @@ void cSoftOsd::Flush(void)
static char warned;
if (!warned) {
dsyslog("[softhddev]%s: FIXME: should be truecolor\n",
__FUNCTION__);
dsyslog("[softhddev]%s: FIXME: should be truecolor\n", __FUNCTION__);
warned = 1;
}
#endif
@ -533,16 +527,13 @@ void cSoftOsd::Flush(void)
argb = (uint8_t *) malloc(w * h * sizeof(uint32_t));
for (y = y1; y <= y2; ++y) {
for (x = x1; x <= x2; ++x) {
((uint32_t *) argb)[x - x1 + (y - y1) * w] = bitmap->GetColor(x, y);
}
}
#ifdef OSD_DEBUG
dsyslog("[softhddev]%s: draw %dx%d%+d%+d bm\n", __FUNCTION__, w, h,
xs + x1, ys + y1);
dsyslog("[softhddev]%s: draw %dx%d%+d%+d bm\n", __FUNCTION__, w, h, xs + x1, ys + y1);
#endif
OsdDrawARGB(0, 0, w, h, w * sizeof(uint32_t), argb, xs + x1, ys + y1);
bitmap->Clean();
// FIXME: reuse argb
@ -620,8 +611,8 @@ void cSoftOsd::Flush(void)
}
}
#ifdef OSD_DEBUG
dsyslog("[softhddev]%s: draw %dx%d%+d%+d*%d -> %+d%+d %p\n",
__FUNCTION__, w, h, xp, yp, stride, x, y, pm->Data());
dsyslog("[softhddev]%s: draw %dx%d%+d%+d*%d -> %+d%+d %p\n", __FUNCTION__, w, h, xp, yp, stride, x, y,
pm->Data());
#endif
OsdDrawARGB(xp, yp, w, h, stride, pm->Data(), x, y);
@ -636,39 +627,119 @@ void cSoftOsd::Flush(void)
#ifdef USE_OPENGLOSD
//Dummy OSD for OpenGL OSD if no X Server is available
class cDummyOsd:public cOsd
{
public:
cDummyOsd(int Left, int Top, uint Level):cOsd(Left, Top, Level)
{
}
virtual ~ cDummyOsd()
{
}
virtual cPixmap *CreatePixmap(int Layer, const cRect & ViewPort, const cRect & DrawPort = cRect::Null) {
(void)Layer;
(void)ViewPort;
(void)DrawPort;
return NULL;
}
virtual void DestroyPixmap(cPixmap * Pixmap)
{
(void)Pixmap;
}
virtual void DrawImage(const cPoint & Point, const cImage & Image)
{
(void)Point;
(void)Image;
}
virtual void DrawImage(const cPoint & Point, int ImageHandle)
{
(void)Point;
(void)ImageHandle;
}
virtual eOsdError CanHandleAreas(const tArea * Areas, int NumAreas)
{
(void)Areas;
(void)NumAreas;
return oeOk;
}
virtual eOsdError SetAreas(const tArea * Areas, int NumAreas)
{
(void)Areas;
(void)NumAreas;
return oeOk;
}
virtual void SaveRegion(int x1, int y1, int x2, int y2)
{
(void)x1;
(void)y1;
(void)x2;
(void)y2;
}
virtual void RestoreRegion(void)
{
}
virtual eOsdError SetPalette(const cPalette & Palette, int Area)
{
(void)Palette;
(void)Area;
return oeOk;
}
virtual void DrawPixel(int x, int y, tColor Color)
{
(void)x;
(void)y;
(void)Color;
}
virtual void DrawBitmap(int x, int y, const cBitmap & Bitmap, tColor ColorFg = 0, tColor ColorBg =
0, bool ReplacePalette = false, bool Overlay = false) {
(void)x;
(void)y;
(void)Bitmap;
(void)ColorFg;
(void)ColorBg;
(void)ReplacePalette;
(void)Overlay;
}
virtual void DrawText(int x, int y, const char *s, tColor ColorFg, tColor ColorBg, const cFont * Font, int Width =
0, int Height = 0, int Alignment = taDefault) {
(void)x;
(void)y;
(void)s;
(void)ColorFg;
(void)ColorBg;
(void)Font;
(void)Width;
(void)Height;
(void)Alignment;
}
virtual void DrawRectangle(int x1, int y1, int x2, int y2, tColor Color)
{
(void)x1;
(void)y1;
(void)x2;
(void)y2;
(void)Color;
}
virtual void DrawEllipse(int x1, int y1, int x2, int y2, tColor Color, int Quadrants = 0) {
(void)x1;
(void)y1;
(void)x2;
(void)y2;
(void)Color;
(void)Quadrants;
}
virtual void DrawSlope(int x1, int y1, int x2, int y2, tColor Color, int Type)
{
(void)x1;
(void)y1;
(void)x2;
(void)y2;
(void)Color;
(void)Type;
}
virtual void Flush(void)
{
}
};
#endif
@ -711,6 +782,7 @@ int cSoftOsdProvider::StoreImageData(const cImage &Image)
{
if (StartOpenGlThread()) {
int imgHandle = oglThread->StoreImage(Image);
return imgHandle;
}
return 0;
@ -722,6 +794,7 @@ void cSoftOsdProvider::DropImageData(int ImageHandle)
oglThread->DropImageData(ImageHandle);
}
#endif
/**
** Create a new OSD.
**
@ -755,18 +828,20 @@ bool cSoftOsdProvider::ProvidesTrueColor(void)
}
#ifdef USE_OPENGLOSD
const cImage *cSoftOsdProvider::GetImageData(int ImageHandle)
{
return cOsdProvider::GetImageData(ImageHandle);
}
void cSoftOsdProvider::OsdSizeChanged(void)
{
//cleanup OpenGl Context
cSoftOsdProvider::StopOpenGlThread();
cOsdProvider::UpdateOsdSize();
}
bool cSoftOsdProvider::StartOpenGlThread(void)
{
//only try to start worker thread if shd is attached
//otherwise glutInit() crashes
if (SuspendMode != NOT_SUSPENDED) {
@ -780,6 +855,7 @@ bool cSoftOsdProvider::StartOpenGlThread(void) {
oglThread.reset();
}
cCondWait wait;
dsyslog("[softhddev]Trying to start OpenGL Worker Thread");
oglThread.reset(new cOglThread(&wait, ConfigMaxSizeGPUImageCache));
wait.Wait();
@ -791,7 +867,8 @@ bool cSoftOsdProvider::StartOpenGlThread(void) {
return false;
}
void cSoftOsdProvider::StopOpenGlThread(void)
{
dsyslog("[softhddev]stopping OpenGL Worker Thread ");
if (oglThread) {
// OsdClose();
@ -966,8 +1043,7 @@ static inline cOsdItem *SeparatorItem(const char *label)
** @param flag flag handling collapsed or opened
** @param msg open message
*/
inline cOsdItem *cMenuSetupSoft::CollapsedItem(const char *label, int &flag, const char *msg)
{
cOsdItem *item;
@ -1020,6 +1096,7 @@ void cMenuSetupSoft::Create(void)
static int scalers = 0;
static char *scaling[100];
static char *scalingtest[100];
if (scalers == 0) {
scalingtest[0] = "Off";
for (scalers = 0; pl_named_filters[scalers].filter != NULL; scalers++) {
@ -1031,7 +1108,6 @@ void cMenuSetupSoft::Create(void)
}
#endif
current = Current(); // get current menu item index
Clear(); // clear the menu
@ -1068,51 +1144,39 @@ void cMenuSetupSoft::Create(void)
Add(CollapsedItem(tr("Video"), Video));
if (Video) {
#ifdef USE_SCREENSAVER
Add(new cMenuEditBoolItem(tr("Enable Screensaver(DPMS) at black screen"), &EnableDPMSatBlackScreen,
trVDR("no"), trVDR("yes")));
Add(new cMenuEditStraItem(trVDR("4:3 video display format"),
&Video4to3DisplayFormat, 3, video_display_formats_4_3));
Add(new cMenuEditStraItem(trVDR("16:9+other video display format"),
&VideoOtherDisplayFormat, 3, video_display_formats_16_9));
Add(new cMenuEditStraItem(trVDR("4:3 video display format"), &Video4to3DisplayFormat, 3,
video_display_formats_4_3));
Add(new cMenuEditStraItem(trVDR("16:9+other video display format"), &VideoOtherDisplayFormat, 3,
video_display_formats_16_9));
#if 0
// FIXME: switch config gray/color configuration
Add(new cMenuEditIntItem(tr("Video background color (RGB)"),
(int *)&Background, 0, 0x00FFFFFF));
Add(new cMenuEditIntItem(tr("Video background color (Alpha)"),
(int *)&BackgroundAlpha, 0, 0xFF));
Add(new cMenuEditIntItem(tr("Video background color (RGB)"), (int *)&Background, 0, 0x00FFFFFF));
Add(new cMenuEditIntItem(tr("Video background color (Alpha)"), (int *)&BackgroundAlpha, 0, 0xFF));
#endif
#ifdef PLACEBO
Add(new cMenuEditBoolItem(tr("Use studio levels"),
&StudioLevels, trVDR("no"), trVDR("yes")));
Add(new cMenuEditBoolItem(tr("Use studio levels"), &StudioLevels, trVDR("no"), trVDR("yes")));
#endif
Add(new cMenuEditBoolItem(tr("60hz display mode"), &_60HzMode,
trVDR("no"), trVDR("yes")));
Add(new cMenuEditBoolItem(tr("Soft start a/v sync"), &SoftStartSync,
trVDR("no"), trVDR("yes")));
Add(new cMenuEditBoolItem(tr("Black during channel switch"),
&BlackPicture, trVDR("no"), trVDR("yes")));
Add(new cMenuEditBoolItem(tr("Clear decoder on channel switch"),
&ClearOnSwitch, trVDR("no"), trVDR("yes")));
Add(new cMenuEditBoolItem(tr("60hz display mode"), &_60HzMode, trVDR("no"), trVDR("yes")));
Add(new cMenuEditBoolItem(tr("Soft start a/v sync"), &SoftStartSync, trVDR("no"), trVDR("yes")));
Add(new cMenuEditBoolItem(tr("Black during channel switch"), &BlackPicture, trVDR("no"), trVDR("yes")));
Add(new cMenuEditBoolItem(tr("Clear decoder on channel switch"), &ClearOnSwitch, trVDR("no"), trVDR("yes")));
#if PLACEBO
Add(new cMenuEditStraItem(tr("Scaler Test"), &ConfigScalerTest, scalers + 1, scalingtest));
Add(new cMenuEditIntItem(tr("Brightness (-100..100)"),
&Brightness, -100, 100, tr("min"), tr("max")));
Add(new cMenuEditIntItem(tr("Contrast (0..100)"), &Contrast,
0, 100, tr("min"), tr("max")));
Add(new cMenuEditIntItem(tr("Saturation (0..100)"),
&Saturation, 0, 100, tr("min"), tr("max")));
Add(new cMenuEditIntItem(tr("Gamma (0..100)"),
&Gamma, 0, 100, tr("min"), tr("max")));
Add(new cMenuEditIntItem(tr("Brightness (-100..100)"), &Brightness, -100, 100, tr("min"), tr("max")));
Add(new cMenuEditIntItem(tr("Contrast (0..100)"), &Contrast, 0, 100, tr("min"), tr("max")));
Add(new cMenuEditIntItem(tr("Saturation (0..100)"), &Saturation, 0, 100, tr("min"), tr("max")));
Add(new cMenuEditIntItem(tr("Gamma (0..100)"), &Gamma, 0, 100, tr("min"), tr("max")));
Add(new cMenuEditIntItem(tr("Hue (-314..314) "), &Hue, -314, 314, tr("min"), tr("max")));
Add(new cMenuEditStraItem(tr("Monitor Colorspace"), &TargetColorSpace, 5, target_colorspace));
Add(new cMenuEditStraItem(tr("Color Blindness"), &ColorBlindness, 5, target_colorblindness));
Add(new cMenuEditIntItem(tr("Color Correction (-100..100) "), &ColorBlindnessFaktor, -100,
100, tr("min"), tr("max")));
Add(new cMenuEditIntItem(tr("Color Correction (-100..100) "), &ColorBlindnessFaktor, -100, 100, tr("min"),
tr("max")));
#endif
for (i = 0; i < RESOLUTIONS; ++i) {
@ -1132,20 +1196,16 @@ void cMenuSetupSoft::Create(void)
}
#endif
#if 0
Add(new cMenuEditBoolItem(tr("SkipChromaDeinterlace (vdpau)"),
&SkipChromaDeinterlace[i], trVDR("no"), trVDR("yes")));
Add(new cMenuEditBoolItem(tr("Inverse Telecine (vdpau)"),
&InverseTelecine[i], trVDR("no"), trVDR("yes")));
Add(new cMenuEditIntItem(tr("Denoise (0..1000) (vdpau)"),
&Denoise[i], 0, 1000, tr("off"), tr("max")));
Add(new cMenuEditIntItem(tr("Sharpen (-1000..1000) (vdpau)"),
&Sharpen[i], -1000, 1000, tr("blur max"),
Add(new cMenuEditBoolItem(tr("SkipChromaDeinterlace (vdpau)"), &SkipChromaDeinterlace[i], trVDR("no"),
trVDR("yes")));
Add(new cMenuEditBoolItem(tr("Inverse Telecine (vdpau)"), &InverseTelecine[i], trVDR("no"),
trVDR("yes")));
Add(new cMenuEditIntItem(tr("Denoise (0..1000) (vdpau)"), &Denoise[i], 0, 1000, tr("off"), tr("max")));
Add(new cMenuEditIntItem(tr("Sharpen (-1000..1000) (vdpau)"), &Sharpen[i], -1000, 1000, tr("blur max"),
tr("sharpen max")));
#endif
Add(new cMenuEditIntItem(tr("Cut top and bottom (pixel)"),
&CutTopBottom[i], 0, 250));
Add(new cMenuEditIntItem(tr("Cut left and right (pixel)"),
&CutLeftRight[i], 0, 250));
Add(new cMenuEditIntItem(tr("Cut top and bottom (pixel)"), &CutTopBottom[i], 0, 250));
Add(new cMenuEditIntItem(tr("Cut left and right (pixel)"), &CutLeftRight[i], 0, 250));
}
}
#ifdef USE_AUTOCROP
@ -1169,7 +1229,8 @@ void cMenuSetupSoft::Create(void)
Add(new cMenuEditBoolItem(tr("Pass-through default"), &AudioPassthroughDefault, trVDR("off"), trVDR("on")));
Add(new cMenuEditBoolItem(tr("\040\040PCM pass-through"), &AudioPassthroughPCM, trVDR("no"), trVDR("yes")));
Add(new cMenuEditBoolItem(tr("\040\040AC-3 pass-through"), &AudioPassthroughAC3, trVDR("no"), trVDR("yes")));
Add(new cMenuEditBoolItem(tr("\040\040E-AC-3 pass-through"),&AudioPassthroughEAC3, trVDR("no"), trVDR("yes")));
Add(new cMenuEditBoolItem(tr("\040\040E-AC-3 pass-through"), &AudioPassthroughEAC3, trVDR("no"),
trVDR("yes")));
Add(new cMenuEditBoolItem(tr("Enable (E-)AC-3 (decoder) downmix"), &AudioDownmix, trVDR("no"), trVDR("yes")));
Add(new cMenuEditBoolItem(tr("Volume control"), &AudioSoftvol, tr("Hardware"), tr("Software")));
Add(new cMenuEditBoolItem(tr("Enable normalize volume"), &AudioNormalize, trVDR("no"), trVDR("yes")));
@ -1401,10 +1462,8 @@ void cMenuSetupSoft::Store(void)
int i;
SetupStore("MakePrimary", ConfigMakePrimary = MakePrimary);
SetupStore("HideMainMenuEntry", ConfigHideMainMenuEntry =
HideMainMenuEntry);
SetupStore("DetachFromMainMenu", ConfigDetachFromMainMenu =
DetachFromMainMenu);
SetupStore("HideMainMenuEntry", ConfigHideMainMenuEntry = HideMainMenuEntry);
SetupStore("DetachFromMainMenu", ConfigDetachFromMainMenu = DetachFromMainMenu);
switch (OsdSize) {
case 0:
OsdWidth = 0;
@ -1421,8 +1480,7 @@ void cMenuSetupSoft::Store(void)
break;
}
if (ConfigOsdWidth != OsdWidth || ConfigOsdHeight != OsdHeight) {
VideoSetOsdSize(ConfigOsdWidth = OsdWidth, ConfigOsdHeight = OsdHeight);
// FIXME: shown osd size not updated
}
SetupStore("Osd.Width", ConfigOsdWidth);
@ -1431,11 +1489,9 @@ void cMenuSetupSoft::Store(void)
SetupStore("Suspend.Close", ConfigSuspendClose = SuspendClose);
SetupStore("Suspend.X11", ConfigSuspendX11 = SuspendX11);
SetupStore("Video4to3DisplayFormat", Config4to3DisplayFormat =
Video4to3DisplayFormat);
SetupStore("Video4to3DisplayFormat", Config4to3DisplayFormat = Video4to3DisplayFormat);
VideoSet4to3DisplayFormat(Config4to3DisplayFormat);
SetupStore("VideoOtherDisplayFormat", ConfigOtherDisplayFormat =
VideoOtherDisplayFormat);
SetupStore("VideoOtherDisplayFormat", ConfigOtherDisplayFormat = VideoOtherDisplayFormat);
VideoSetOtherDisplayFormat(ConfigOtherDisplayFormat);
ConfigVideoBackground = Background << 8 | (BackgroundAlpha & 0xFF);
@ -1477,10 +1533,8 @@ void cMenuSetupSoft::Store(void)
SetupStore(buf, ConfigVideoScaling[i] = Scaling[i]);
snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Deinterlace");
SetupStore(buf, ConfigVideoDeinterlace[i] = Deinterlace[i]);
snprintf(buf, sizeof(buf), "%s.%s", Resolution[i],
"SkipChromaDeinterlace");
SetupStore(buf, ConfigVideoSkipChromaDeinterlace[i] =
SkipChromaDeinterlace[i]);
snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "SkipChromaDeinterlace");
SetupStore(buf, ConfigVideoSkipChromaDeinterlace[i] = SkipChromaDeinterlace[i]);
snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "InverseTelecine");
SetupStore(buf, ConfigVideoInverseTelecine[i] = InverseTelecine[i]);
snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "Denoise");
@ -1504,10 +1558,8 @@ void cMenuSetupSoft::Store(void)
SetupStore("AutoCrop.Interval", ConfigAutoCropInterval = AutoCropInterval);
SetupStore("AutoCrop.Delay", ConfigAutoCropDelay = AutoCropDelay);
SetupStore("AutoCrop.Tolerance", ConfigAutoCropTolerance =
AutoCropTolerance);
VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay,
ConfigAutoCropTolerance);
SetupStore("AutoCrop.Tolerance", ConfigAutoCropTolerance = AutoCropTolerance);
VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay, ConfigAutoCropTolerance);
ConfigAutoCropEnabled = ConfigAutoCropInterval != 0;
SetupStore("AudioDelay", ConfigVideoAudioDelay = AudioDelay);
@ -1536,15 +1588,12 @@ void cMenuSetupSoft::Store(void)
SetupStore("AudioSoftvol", ConfigAudioSoftvol = AudioSoftvol);
AudioSetSoftvol(ConfigAudioSoftvol);
SetupStore("AudioNormalize", ConfigAudioNormalize = AudioNormalize);
SetupStore("AudioMaxNormalize", ConfigAudioMaxNormalize =
AudioMaxNormalize);
SetupStore("AudioMaxNormalize", ConfigAudioMaxNormalize = AudioMaxNormalize);
AudioSetNormalize(ConfigAudioNormalize, ConfigAudioMaxNormalize);
SetupStore("AudioCompression", ConfigAudioCompression = AudioCompression);
SetupStore("AudioMaxCompression", ConfigAudioMaxCompression =
AudioMaxCompression);
SetupStore("AudioMaxCompression", ConfigAudioMaxCompression = AudioMaxCompression);
AudioSetCompression(ConfigAudioCompression, ConfigAudioMaxCompression);
SetupStore("AudioStereoDescent", ConfigAudioStereoDescent =
AudioStereoDescent);
SetupStore("AudioStereoDescent", ConfigAudioStereoDescent = AudioStereoDescent);
AudioSetStereoDescent(ConfigAudioStereoDescent);
SetupStore("AudioBufferTime", ConfigAudioBufferTime = AudioBufferTime);
SetupStore("AudioAutoAES", ConfigAudioAutoAES = AudioAutoAES);
@ -1565,21 +1614,17 @@ void cMenuSetupSoft::Store(void)
SetupStore("pip.Alt.Height", ConfigPipAltHeight = PipAltHeight);
SetupStore("pip.Alt.VideoX", ConfigPipAltVideoX = PipAltVideoX);
SetupStore("pip.Alt.VideoY", ConfigPipAltVideoY = PipAltVideoY);
SetupStore("pip.Alt.VideoWidth", ConfigPipAltVideoWidth =
PipAltVideoWidth);
SetupStore("pip.Alt.VideoHeight", ConfigPipAltVideoHeight =
PipAltVideoHeight);
SetupStore("pip.Alt.VideoWidth", ConfigPipAltVideoWidth = PipAltVideoWidth);
SetupStore("pip.Alt.VideoHeight", ConfigPipAltVideoHeight = PipAltVideoHeight);
#endif
#ifdef USE_SCREENSAVER
SetupStore("EnableDPMSatBlackScreen", ConfigEnableDPMSatBlackScreen =
EnableDPMSatBlackScreen);
SetupStore("EnableDPMSatBlackScreen", ConfigEnableDPMSatBlackScreen = EnableDPMSatBlackScreen);
SetDPMSatBlackScreen(ConfigEnableDPMSatBlackScreen);
#endif
#ifdef USE_OPENGLOSD
SetupStore("MaxSizeGPUImageCache", ConfigMaxSizeGPUImageCache =
MaxSizeGPUImageCache);
SetupStore("MaxSizeGPUImageCache", ConfigMaxSizeGPUImageCache = MaxSizeGPUImageCache);
#endif
}
@ -1711,8 +1756,7 @@ class cSoftReceiver:public cReceiver
**
** @param channel channel to receive
*/
cSoftReceiver::cSoftReceiver(const cChannel * channel):cReceiver(NULL,
MINPRIORITY)
cSoftReceiver::cSoftReceiver(const cChannel * channel):cReceiver(NULL, MINPRIORITY)
{
// cReceiver::channelID not setup, this can cause trouble
// we want video only
@ -1741,25 +1785,17 @@ void cSoftReceiver::Activate(bool on)
GetOsdSize(&width, &height, &video_aspect);
if (PipAltPosition) {
PipStart((ConfigPipAltVideoX * width) / 100,
(ConfigPipAltVideoY * height) / 100,
ConfigPipAltVideoWidth ? (ConfigPipAltVideoWidth * width) /
100 : width,
ConfigPipAltVideoHeight ? (ConfigPipAltVideoHeight * height) /
100 : height, (ConfigPipAltX * width) / 100,
(ConfigPipAltY * height) / 100,
PipStart((ConfigPipAltVideoX * width) / 100, (ConfigPipAltVideoY * height) / 100,
ConfigPipAltVideoWidth ? (ConfigPipAltVideoWidth * width) / 100 : width,
ConfigPipAltVideoHeight ? (ConfigPipAltVideoHeight * height) / 100 : height,
(ConfigPipAltX * width) / 100, (ConfigPipAltY * height) / 100,
ConfigPipAltWidth ? (ConfigPipAltWidth * width) / 100 : width,
ConfigPipAltHeight ? (ConfigPipAltHeight * height) /
100 : height);
ConfigPipAltHeight ? (ConfigPipAltHeight * height) / 100 : height);
} else {
PipStart((ConfigPipVideoX * width) / 100,
(ConfigPipVideoY * height) / 100,
ConfigPipVideoWidth ? (ConfigPipVideoWidth * width) /
100 : width,
ConfigPipVideoHeight ? (ConfigPipVideoHeight * height) /
100 : height, (ConfigPipX * width) / 100,
(ConfigPipY * height) / 100,
ConfigPipWidth ? (ConfigPipWidth * width) / 100 : width,
PipStart((ConfigPipVideoX * width) / 100, (ConfigPipVideoY * height) / 100,
ConfigPipVideoWidth ? (ConfigPipVideoWidth * width) / 100 : width,
ConfigPipVideoHeight ? (ConfigPipVideoHeight * height) / 100 : height, (ConfigPipX * width) / 100,
(ConfigPipY * height) / 100, ConfigPipWidth ? (ConfigPipWidth * width) / 100 : width,
ConfigPipHeight ? (ConfigPipHeight * height) / 100 : height);
}
} else {
@ -1793,13 +1829,11 @@ static void PipPesParse(const uint8_t * data, int size, int is_start)
if (is_start) { // start of pes packet
if (pes_index) {
if (0) {
fprintf(stderr, "pip: PES packet %8d %02x%02x\n", pes_index,
pes_buf[2], pes_buf[3]);
fprintf(stderr, "pip: PES packet %8d %02x%02x\n", pes_index, pes_buf[2], pes_buf[3]);
}
if (pes_buf[0] || pes_buf[1] || pes_buf[2] != 0x01) {
// FIXME: first should always fail
esyslog(tr("[softhddev]pip: invalid PES packet %d\n"),
pes_index);
esyslog(tr("[softhddev]pip: invalid PES packet %d\n"), pes_index);
} else {
PipPlayVideo(pes_buf, pes_index);
// FIXME: buffer full: pes packet is dropped
@ -1860,8 +1894,8 @@ void cSoftReceiver::Receive(uchar * data, int size)
int pid;
pid = (p[1] & 0x1F) << 8 | p[2];
fprintf(stderr, "tsdemux: PID: %#04x%s%s\n", pid,
p[1] & 0x40 ? " start" : "", p[3] & 0x10 ? " payload" : "");
fprintf(stderr, "tsdemux: PID: %#04x%s%s\n", pid, p[1] & 0x40 ? " start" : "",
p[3] & 0x10 ? " payload" : "");
}
// skip adaptation field
switch (p[3] & 0x30) { // adaption field
@ -1876,8 +1910,7 @@ void cSoftReceiver::Receive(uchar * data, int size)
payload = 5 + p[4];
// illegal length, ignore packet
if (payload >= TS_PACKET_SIZE) {
dsyslog
("[softhddev]tsdemux: illegal adaption field length\n");
dsyslog("[softhddev]tsdemux: illegal adaption field length\n");
goto next_packet;
}
break;
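
The two hunks above parse the fixed 188-byte MPEG-TS packet header by hand: the 13-bit PID comes from bytes 1-2, and the adaptation-field control bits in byte 3 decide where the payload starts. The following is only a reading aid that mirrors that logic (TS_PACKET_SIZE redefined locally so the sketch is self-contained), not code from this commit:

    #include <stdint.h>

    #define TS_PACKET_SIZE 188              // fixed MPEG-TS packet size

    // Return the byte offset of the payload inside one TS packet,
    // or -1 if the packet carries no usable payload.
    static int TsPayloadOffset(const uint8_t *p)
    {
        if (p[0] != 0x47) {                 // sync byte missing
            return -1;
        }
        // 13-bit packet identifier, as printed by the debug output above:
        // int pid = (p[1] & 0x1F) << 8 | p[2];
        switch (p[3] & 0x30) {              // adaptation field control
            case 0x00:                      // reserved
            case 0x20:                      // adaptation field only, no payload
                return -1;
            case 0x10:                      // payload only
                return 4;
            case 0x30:                      // adaptation field followed by payload
                return 5 + p[4] < TS_PACKET_SIZE ? 5 + p[4] : -1;
        }
        return -1;
    }
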
@ -1985,8 +2018,7 @@ static void PipNextAvailableChannel(int direction)
channel = direction > 0 ? Channels MURKS Next(channel)
: Channels MURKS Prev(channel);
if (!channel && Setup.ChannelsWrap) {
channel =
direction > 0 ? Channels MURKS First() : Channels MURKS Last();
channel = direction > 0 ? Channels MURKS First() : Channels MURKS Last();
}
if (channel && !channel->GroupSep()
&& (device = cDevice::GetDevice(channel, 0, false, true))
@ -2037,23 +2069,16 @@ static void SwapPipPosition(void)
GetOsdSize(&width, &height, &video_aspect);
if (PipAltPosition) {
PipSetPosition((ConfigPipAltVideoX * width) / 100,
(ConfigPipAltVideoY * height) / 100,
ConfigPipAltVideoWidth ? (ConfigPipAltVideoWidth * width) /
100 : width,
ConfigPipAltVideoHeight ? (ConfigPipAltVideoHeight * height) /
100 : height, (ConfigPipAltX * width) / 100,
(ConfigPipAltY * height) / 100,
ConfigPipAltWidth ? (ConfigPipAltWidth * width) / 100 : width,
PipSetPosition((ConfigPipAltVideoX * width) / 100, (ConfigPipAltVideoY * height) / 100,
ConfigPipAltVideoWidth ? (ConfigPipAltVideoWidth * width) / 100 : width,
ConfigPipAltVideoHeight ? (ConfigPipAltVideoHeight * height) / 100 : height, (ConfigPipAltX * width) / 100,
(ConfigPipAltY * height) / 100, ConfigPipAltWidth ? (ConfigPipAltWidth * width) / 100 : width,
ConfigPipAltHeight ? (ConfigPipAltHeight * height) / 100 : height);
} else {
PipSetPosition((ConfigPipVideoX * width) / 100,
(ConfigPipVideoY * height) / 100,
PipSetPosition((ConfigPipVideoX * width) / 100, (ConfigPipVideoY * height) / 100,
ConfigPipVideoWidth ? (ConfigPipVideoWidth * width) / 100 : width,
ConfigPipVideoHeight ? (ConfigPipVideoHeight * height) /
100 : height, (ConfigPipX * width) / 100,
(ConfigPipY * height) / 100,
ConfigPipWidth ? (ConfigPipWidth * width) / 100 : width,
ConfigPipVideoHeight ? (ConfigPipVideoHeight * height) / 100 : height, (ConfigPipX * width) / 100,
(ConfigPipY * height) / 100, ConfigPipWidth ? (ConfigPipWidth * width) / 100 : width,
ConfigPipHeight ? (ConfigPipHeight * height) / 100 : height);
}
}
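
The long PipStart()/PipSetPosition() argument lists above and in Activate() repeat one conversion: the pip.* setup values are percentages of the current OSD size, and for the width/height arguments a value of 0 falls back to the full OSD dimension. Purely as an illustration (this helper does not exist in the plugin), the rule is:

    // Convert a pip.* setup percentage to pixels; 0 means "use the full
    // OSD dimension" for width/height style values.
    static int PipPercentToPixel(int percent, int osd_size, int fallback)
    {
        return percent ? (percent * osd_size) / 100 : fallback;
    }

With it, an argument such as ConfigPipVideoWidth ? (ConfigPipVideoWidth * width) / 100 : width would read PipPercentToPixel(ConfigPipVideoWidth, width, width).
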
@ -2136,7 +2161,8 @@ void cSoftHdMenu::Create(void)
Add(new cOsdItem(NULL, osUnknown, false));
Add(new cOsdItem(NULL, osUnknown, false));
GetStats(&missed, &duped, &dropped, &counter, &frametime);
Add(new cOsdItem(cString::sprintf(tr(" Frames missed(%d) duped(%d) dropped(%d) total(%d)"), missed, duped, dropped, counter), osUnknown, false));
Add(new cOsdItem(cString::sprintf(tr(" Frames missed(%d) duped(%d) dropped(%d) total(%d)"), missed, duped, dropped,
counter), osUnknown, false));
Add(new cOsdItem(cString::sprintf(tr(" Frame Process time %2.2fms"), frametime), osUnknown, false));
SetCurrent(Get(current)); // restore selected menu entry
Display(); // display build menu
@ -2145,8 +2171,7 @@ void cSoftHdMenu::Create(void)
/**
** Soft device menu constructor.
*/
cSoftHdMenu::cSoftHdMenu(const char *title, int c0, int c1, int c2, int c3,
int c4)
cSoftHdMenu::cSoftHdMenu(const char *title, int c0, int c1, int c2, int c3, int c4)
:cOsdMenu(title, c0, c1, c2, c3, c4)
{
HotkeyState = HksInitial;
@ -2193,16 +2218,12 @@ static void HandleHotkey(int code)
case 13: // decrease audio delay
ConfigVideoAudioDelay -= 10;
VideoSetAudioDelay(ConfigVideoAudioDelay);
Skins.QueueMessage(mtInfo,
cString::sprintf(tr("audio delay changed to %d"),
ConfigVideoAudioDelay));
Skins.QueueMessage(mtInfo, cString::sprintf(tr("audio delay changed to %d"), ConfigVideoAudioDelay));
break;
case 14: // increase audio delay
ConfigVideoAudioDelay += 10;
VideoSetAudioDelay(ConfigVideoAudioDelay);
Skins.QueueMessage(mtInfo,
cString::sprintf(tr("audio delay changed to %d"),
ConfigVideoAudioDelay));
Skins.QueueMessage(mtInfo, cString::sprintf(tr("audio delay changed to %d"), ConfigVideoAudioDelay));
break;
case 15:
ConfigAudioDownmix ^= 1;
@ -2235,8 +2256,7 @@ static void HandleHotkey(int code)
if (!ConfigAutoCropInterval) {
ConfigAutoCropInterval = 50;
}
VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay,
ConfigAutoCropTolerance);
VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay, ConfigAutoCropTolerance);
Skins.QueueMessage(mtInfo, tr("auto-crop enabled"));
break;
case 25: // toggle auto-crop
@ -2245,13 +2265,12 @@ static void HandleHotkey(int code)
if (!ConfigAutoCropInterval) {
ConfigAutoCropInterval = 50;
}
VideoSetAutoCrop(ConfigAutoCropEnabled * ConfigAutoCropInterval,
ConfigAutoCropDelay, ConfigAutoCropTolerance);
VideoSetAutoCrop(ConfigAutoCropEnabled * ConfigAutoCropInterval, ConfigAutoCropDelay,
ConfigAutoCropTolerance);
if (ConfigAutoCropEnabled) {
Skins.QueueMessage(mtInfo, tr("auto-crop enabled"));
} else {
Skins.QueueMessage(mtInfo,
tr("auto-crop disabled and freezed"));
Skins.QueueMessage(mtInfo, tr("auto-crop disabled and freezed"));
}
break;
case 30: // change 4:3 -> window mode
@ -2334,15 +2353,13 @@ eOSState cSoftHdMenu::ProcessKey(eKeys key)
HotkeyCode *= 10;
HotkeyCode += key - k0;
HotkeyState = HksInitial;
dsyslog("[softhddev]%s: hot-key %d\n", __FUNCTION__,
HotkeyCode);
dsyslog("[softhddev]%s: hot-key %d\n", __FUNCTION__, HotkeyCode);
HandleHotkey(HotkeyCode);
return osEnd;
}
if (key == kOk) {
HotkeyState = HksInitial;
dsyslog("[softhddev]%s: hot-key %d\n", __FUNCTION__,
HotkeyCode);
dsyslog("[softhddev]%s: hot-key %d\n", __FUNCTION__, HotkeyCode);
HandleHotkey(HotkeyCode);
return osEnd;
}
@ -2371,8 +2388,7 @@ eOSState cSoftHdMenu::ProcessKey(eKeys key)
Suspend(1, 1, 0);
SuspendMode = SUSPEND_DETACHED;
} else {
Suspend(ConfigSuspendClose, ConfigSuspendClose,
ConfigSuspendX11);
Suspend(ConfigSuspendClose, ConfigSuspendClose, ConfigSuspendX11);
SuspendMode = SUSPEND_NORMAL;
}
#ifdef USE_OPENGLOSD
@ -2380,8 +2396,7 @@ eOSState cSoftHdMenu::ProcessKey(eKeys key)
cSoftOsdProvider::StopOpenGlThread();
#endif
if (ShutdownHandler.GetUserInactiveTime()) {
dsyslog("[softhddev]%s: set user inactive\n",
__FUNCTION__);
dsyslog("[softhddev]%s: set user inactive\n", __FUNCTION__);
ShutdownHandler.SetUserInactive();
}
}
@ -2424,10 +2439,16 @@ class cSoftHdDevice:public cDevice
cSoftHdDevice(void);
virtual ~ cSoftHdDevice(void);
#ifdef CUVID
virtual cString DeviceName(void) const { return "softhdcuvid"; }
virtual cString DeviceName(void) const
{
return "softhdcuvid";
}
#endif
#ifdef VAAPI
virtual cString DeviceName(void) const { return "softhdvaapi"; }
virtual cString DeviceName(void) const
{
return "softhdvaapi";
}
#endif
virtual bool HasDecoder(void) const;
virtual bool CanReplay(void) const;
@ -2702,8 +2723,7 @@ void cSoftHdDevice::Mute(void)
*/
void cSoftHdDevice::StillPicture(const uchar * data, int length)
{
dsyslog("[softhddev]%s: %s %p %d\n", __FUNCTION__,
data[0] == 0x47 ? "ts" : "pes", data, length);
dsyslog("[softhddev]%s: %s %p %d\n", __FUNCTION__, data[0] == 0x47 ? "ts" : "pes", data, length);
if (data[0] == 0x47) { // ts sync
cDevice::StillPicture(data, length);
@ -2722,8 +2742,7 @@ void cSoftHdDevice::StillPicture(const uchar * data, int length)
** @retval true if ready
** @retval false if busy
*/
bool cSoftHdDevice::Poll(
__attribute__ ((unused)) cPoller & poller, int timeout_ms)
bool cSoftHdDevice::Poll( __attribute__((unused)) cPoller & poller, int timeout_ms)
{
//dsyslog("[softhddev]%s: %d\n", __FUNCTION__, timeout_ms);
@ -2748,8 +2767,7 @@ bool cSoftHdDevice::Flush(int timeout_ms)
** Sets the video display format to the given one (only useful if this
** device has an MPEG decoder).
*/
void cSoftHdDevice:: SetVideoDisplayFormat(eVideoDisplayFormat
video_display_format)
void cSoftHdDevice::SetVideoDisplayFormat(eVideoDisplayFormat video_display_format)
{
dsyslog("[softhddev]%s: %d\n", __FUNCTION__, video_display_format);
@ -2821,13 +2839,13 @@ int cSoftHdDevice::PlayAudio(const uchar * data, int length, uchar id)
return::PlayAudio(data, length, id);
}
void cSoftHdDevice::SetAudioTrackDevice(
__attribute__ ((unused)) eTrackType type)
void cSoftHdDevice::SetAudioTrackDevice( __attribute__((unused)) eTrackType type)
{
//dsyslog("[softhddev]%s:\n", __FUNCTION__);
}
void cSoftHdDevice::SetDigitalAudioDevice( __attribute__ ((unused)) bool on)
void cSoftHdDevice::SetDigitalAudioDevice( __attribute__((unused))
bool on)
{
//dsyslog("[softhddev]%s: %s\n", __FUNCTION__, on ? "true" : "false");
}
@ -2914,8 +2932,7 @@ int cSoftHdDevice::PlayTsAudio(const uchar * data, int length)
** @param width number of horizontal pixels in the frame
** @param height number of vertical pixels in the frame
*/
uchar *cSoftHdDevice::GrabImage(int &size, bool jpeg, int quality, int width,
int height)
uchar *cSoftHdDevice::GrabImage(int &size, bool jpeg, int quality, int width, int height)
{
dsyslog("[softhddev]%s: %d, %d, %d, %dx%d\n", __FUNCTION__, size, jpeg, quality, width, height);
@ -2938,8 +2955,8 @@ uchar *cSoftHdDevice::GrabImage(int &size, bool jpeg, int quality, int width,
**
** @returns the real rectangle or cRect:Null if invalid.
*/
cRect cSoftHdDevice::CanScaleVideo(const cRect & rect,
__attribute__ ((unused)) int alignment)
cRect cSoftHdDevice::CanScaleVideo(const cRect & rect, __attribute__((unused))
int alignment)
{
return rect;
}
@ -2952,8 +2969,7 @@ cRect cSoftHdDevice::CanScaleVideo(const cRect & rect,
void cSoftHdDevice::ScaleVideo(const cRect & rect)
{
#ifdef OSD_DEBUG
dsyslog("[softhddev]%s: %dx%d%+d%+d\n", __FUNCTION__, rect.Width(),
rect.Height(), rect.X(), rect.Y());
dsyslog("[softhddev]%s: %dx%d%+d%+d\n", __FUNCTION__, rect.Width(), rect.Height(), rect.X(), rect.Y());
#endif
::ScaleVideo(rect.X(), rect.Y(), rect.Width(), rect.Height());
}
@ -2963,8 +2979,7 @@ void cSoftHdDevice::ScaleVideo(const cRect & rect)
/**
** Call rgb to jpeg for C Plugin.
*/
extern "C" uint8_t * CreateJpeg(uint8_t * image, int *size, int quality,
int width, int height)
extern "C" uint8_t * CreateJpeg(uint8_t * image, int *size, int quality, int width, int height)
{
return (uint8_t *) RgbToJpeg((uchar *) image, width, height, *size, quality);
}
@ -3082,12 +3097,10 @@ bool cPluginSoftHdDevice::Start(void)
//dsyslog("[softhddev]%s:\n", __FUNCTION__);
if (!MyDevice->IsPrimaryDevice()) {
isyslog("[softhddev] softhddevice %d is not the primary device!",
MyDevice->DeviceNumber());
isyslog("[softhddev] softhddevice %d is not the primary device!", MyDevice->DeviceNumber());
if (ConfigMakePrimary) {
// Must be done in the main thread
dsyslog("[softhddev] makeing softhddevice %d the primary device!",
MyDevice->DeviceNumber());
dsyslog("[softhddev] makeing softhddevice %d the primary device!", MyDevice->DeviceNumber());
DoMakePrimary = MyDevice->DeviceNumber() + 1;
}
}
@ -3174,8 +3187,7 @@ void cPluginSoftHdDevice::MainThreadHook(void)
//dsyslog("[softhddev]%s:\n", __FUNCTION__);
if (DoMakePrimary) {
dsyslog("[softhddev]%s: switching primary device to %d\n",
__FUNCTION__, DoMakePrimary);
dsyslog("[softhddev]%s: switching primary device to %d\n", __FUNCTION__, DoMakePrimary);
cDevice::SetPrimaryDevice(DoMakePrimary);
DoMakePrimary = 0;
}
@ -3274,6 +3286,7 @@ bool cPluginSoftHdDevice::SetupParse(const char *name, const char *value)
}
if (!strcasecmp(name, "Brightness")) {
int i;
i = atoi(value);
ConfigVideoBrightness = i > 100 ? 100 : i;
VideoSetBrightness(ConfigVideoBrightness);
@ -3281,6 +3294,7 @@ bool cPluginSoftHdDevice::SetupParse(const char *name, const char *value)
}
if (!strcasecmp(name, "Contrast")) {
int i;
i = atoi(value);
ConfigVideoContrast = i > 100 ? 100 : i;
VideoSetContrast(ConfigVideoContrast);
@ -3288,6 +3302,7 @@ bool cPluginSoftHdDevice::SetupParse(const char *name, const char *value)
}
if (!strcasecmp(name, "Saturation")) {
int i;
i = atoi(value);
ConfigVideoSaturation = i > 100 ? 100 : i;
VideoSetSaturation(ConfigVideoSaturation);
@ -3295,6 +3310,7 @@ bool cPluginSoftHdDevice::SetupParse(const char *name, const char *value)
}
if (!strcasecmp(name, "Gamma")) {
int i;
i = atoi(value);
ConfigGamma = i > 100 ? 100 : i;
VideoSetGamma(ConfigGamma);
@ -3337,8 +3353,7 @@ bool cPluginSoftHdDevice::SetupParse(const char *name, const char *value)
VideoSetDeinterlace(ConfigVideoDeinterlace);
return true;
}
snprintf(buf, sizeof(buf), "%s.%s", Resolution[i],
"SkipChromaDeinterlace");
snprintf(buf, sizeof(buf), "%s.%s", Resolution[i], "SkipChromaDeinterlace");
if (!strcasecmp(name, buf)) {
ConfigVideoSkipChromaDeinterlace[i] = atoi(value);
VideoSetSkipChromaDeinterlace(ConfigVideoSkipChromaDeinterlace);
@ -3378,19 +3393,16 @@ bool cPluginSoftHdDevice::SetupParse(const char *name, const char *value)
}
if (!strcasecmp(name, "AutoCrop.Interval")) {
VideoSetAutoCrop(ConfigAutoCropInterval =
atoi(value), ConfigAutoCropDelay, ConfigAutoCropTolerance);
VideoSetAutoCrop(ConfigAutoCropInterval = atoi(value), ConfigAutoCropDelay, ConfigAutoCropTolerance);
ConfigAutoCropEnabled = ConfigAutoCropInterval != 0;
return true;
}
if (!strcasecmp(name, "AutoCrop.Delay")) {
VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay =
atoi(value), ConfigAutoCropTolerance);
VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay = atoi(value), ConfigAutoCropTolerance);
return true;
}
if (!strcasecmp(name, "AutoCrop.Tolerance")) {
VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay,
ConfigAutoCropTolerance = atoi(value));
VideoSetAutoCrop(ConfigAutoCropInterval, ConfigAutoCropDelay, ConfigAutoCropTolerance = atoi(value));
return true;
}
@ -3573,11 +3585,10 @@ bool cPluginSoftHdDevice::Service(const char *id, void *data)
return false;
}
SoftHDDevice_AtmoGrabService_v1_0_t *r =
(SoftHDDevice_AtmoGrabService_v1_0_t *) data;
SoftHDDevice_AtmoGrabService_v1_0_t *r = (SoftHDDevice_AtmoGrabService_v1_0_t *) data;
if (r->structSize != sizeof(SoftHDDevice_AtmoGrabService_v1_0_t)
|| r->analyseSize < 64 || r->analyseSize > 256
|| r->clippedOverscan < 0 || r->clippedOverscan > 200) {
|| r->analyseSize < 64 || r->analyseSize > 256 || r->clippedOverscan < 0 || r->clippedOverscan > 200) {
return false;
}
@ -3625,21 +3636,16 @@ bool cPluginSoftHdDevice::Service(const char *id, void *data)
** FIXME: translation?
*/
static const char *SVDRPHelpText[] = {
"SUSP\n" "\040 Suspend plugin.\n\n"
" The plugin is suspended to save energie. Depending on the setup\n"
"SUSP\n" "\040 Suspend plugin.\n\n" " The plugin is suspended to save energie. Depending on the setup\n"
" 'softhddevice.Suspend.Close = 0' only the video and audio output\n"
" is stopped or with 'softhddevice.Suspend.Close = 1' the video\n"
" and audio devices are closed.\n"
" is stopped or with 'softhddevice.Suspend.Close = 1' the video\n" " and audio devices are closed.\n"
" If 'softhddevice.Suspend.X11 = 1' is set and the X11 server was\n"
" started by the plugin, the X11 server would also be closed.\n"
" (Stopping X11 while suspended isn't supported yet)\n",
"RESU\n" "\040 Resume plugin.\n\n"
" Resume the suspended plugin. The plugin could be suspended by\n"
"RESU\n" "\040 Resume plugin.\n\n" " Resume the suspended plugin. The plugin could be suspended by\n"
" the command line option '-s' or by a previous SUSP command.\n"
" If the x11 server was stopped by the plugin, it will be\n"
" restarted.",
"DETA\n" "\040 Detach plugin.\n\n"
" The plugin will be detached from the audio, video and DVB\n"
" If the x11 server was stopped by the plugin, it will be\n" " restarted.",
"DETA\n" "\040 Detach plugin.\n\n" " The plugin will be detached from the audio, video and DVB\n"
" devices. Other programs or plugins can use them now.\n",
"ATTA <-d display> <-a audio> <-p pass>\n" " Attach plugin.\n\n"
" Attach the plugin to audio, video and DVB devices. Use:\n"
@ -3649,34 +3655,22 @@ static const char *SVDRPHelpText[] = {
"PRIM <n>\n" " Make <n> the primary device.\n\n"
" <n> is the number of device. Without number softhddevice becomes\n"
" the primary device. If becoming primary, the plugin is attached\n"
" to the devices. If loosing primary, the plugin is detached from\n"
" the devices.",
"HOTK key\n" " Execute hotkey.\n\n"
" key is the hotkey number, following are supported:\n"
" 10: disable audio pass-through\n"
" 11: enable audio pass-through\n"
" 12: toggle audio pass-through\n"
" 13: decrease audio delay by 10ms\n"
" to the devices. If loosing primary, the plugin is detached from\n" " the devices.",
"HOTK key\n" " Execute hotkey.\n\n" " key is the hotkey number, following are supported:\n"
" 10: disable audio pass-through\n" " 11: enable audio pass-through\n"
" 12: toggle audio pass-through\n" " 13: decrease audio delay by 10ms\n"
" 14: increase audio delay by 10ms\n" " 15: toggle ac3 mixdown\n"
" 20: disable fullscreen\n\040 21: enable fullscreen\n"
" 22: toggle fullscreen\n"
" 23: disable auto-crop\n\040 24: enable auto-crop\n"
" 25: toggle auto-crop\n"
" 20: disable fullscreen\n\040 21: enable fullscreen\n" " 22: toggle fullscreen\n"
" 23: disable auto-crop\n\040 24: enable auto-crop\n" " 25: toggle auto-crop\n"
" 30: stretch 4:3 to display\n\040 31: pillar box 4:3 in display\n"
" 32: center cut-out 4:3 to display\n"
" 39: rotate 4:3 to display zoom mode\n"
" 40: stretch other aspect ratios to display\n"
" 41: letter box other aspect ratios in display\n"
" 32: center cut-out 4:3 to display\n" " 39: rotate 4:3 to display zoom mode\n"
" 40: stretch other aspect ratios to display\n" " 41: letter box other aspect ratios in display\n"
" 42: center cut-out other aspect ratios to display\n"
" 49: rotate other aspect ratios to display zoom mode\n",
"STAT\n" "\040 Display SuspendMode of the plugin.\n\n"
" reply code is 910 + SuspendMode\n"
" SUSPEND_EXTERNAL == -1 (909)\n"
" NOT_SUSPENDED == 0 (910)\n"
" SUSPEND_NORMAL == 1 (911)\n"
" SUSPEND_DETACHED == 2 (912)\n",
"RAIS\n" "\040 Raise softhddevice window\n\n"
" If Xserver is not started by softhddevice, the window which\n"
"STAT\n" "\040 Display SuspendMode of the plugin.\n\n" " reply code is 910 + SuspendMode\n"
" SUSPEND_EXTERNAL == -1 (909)\n" " NOT_SUSPENDED == 0 (910)\n"
" SUSPEND_NORMAL == 1 (911)\n" " SUSPEND_DETACHED == 2 (912)\n",
"RAIS\n" "\040 Raise softhddevice window\n\n" " If Xserver is not started by softhddevice, the window which\n"
" contains the softhddevice frontend will be raised to the front.\n",
NULL
};
@ -3699,8 +3693,8 @@ const char **cPluginSoftHdDevice::SVDRPHelpPages(void)
** @param option all command arguments
** @param reply_code reply code
*/
cString cPluginSoftHdDevice::SVDRPCommand(const char *command,
const char *option, __attribute__ ((unused)) int &reply_code)
cString cPluginSoftHdDevice::SVDRPCommand(const char *command, const char *option, __attribute__((unused))
int &reply_code)
{
if (!strcasecmp(command, "STAT")) {
reply_code = 910 + SuspendMode;
View File
@ -232,10 +232,8 @@ static int MpegCheck(const uint8_t * data, int size)
break;
}
if (0) {
Debug(3,
"pesdemux: mpeg%s layer%d bitrate=%d samplerate=%d %d bytes\n",
mpeg25 ? "2.5" : mpeg2 ? "2" : "1", layer, bit_rate, sample_rate,
frame_size);
Debug(3, "pesdemux: mpeg%s layer%d bitrate=%d samplerate=%d %d bytes\n", mpeg25 ? "2.5" : mpeg2 ? "2" : "1",
layer, bit_rate, sample_rate, frame_size);
}
if (frame_size + 4 > size) {
@ -580,8 +578,7 @@ static void PesInit(PesDemux * pesdx)
/// @param size number of payload data bytes
/// @param is_start flag, start of pes packet
///
static void PesParse(PesDemux * pesdx, const uint8_t * data, int size,
int is_start)
static void PesParse(PesDemux * pesdx, const uint8_t * data, int size, int is_start)
{
const uint8_t *p;
const uint8_t *q;
@ -686,8 +683,7 @@ static void PesParse(PesDemux * pesdx, const uint8_t * data, int size,
// new codec id, close and open new
if (AudioCodecID != codec_id) {
Debug(3, "pesdemux: new codec %#06x -> %#06x\n",
AudioCodecID, codec_id);
Debug(3, "pesdemux: new codec %#06x -> %#06x\n", AudioCodecID, codec_id);
CodecAudioClose(MyAudioDecoder);
CodecAudioOpen(MyAudioDecoder, codec_id);
AudioCodecID = codec_id;
@ -782,21 +778,20 @@ static void PesParse(PesDemux * pesdx, const uint8_t * data, int size,
if ((pesdx->Header[7] & 0xC0) == 0x80) {
pts =
(int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 |
(data[11] & 0xFE) << 14 | data[12] << 7 | (data[13]
(int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] & 0xFE) << 14 | data[12] << 7
| (data[13]
& 0xFE) >> 1;
pesdx->PTS = pts;
pesdx->DTS = AV_NOPTS_VALUE;
} else if ((pesdx->Header[7] & 0xC0) == 0xC0) {
pts =
(int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 |
(data[11] & 0xFE) << 14 | data[12] << 7 | (data[13]
(int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] & 0xFE) << 14 | data[12] << 7
| (data[13]
& 0xFE) >> 1;
pesdx->PTS = pts;
dts =
(int64_t) (data[14] & 0x0E) << 29 | data[15] << 22
| (data[16] & 0xFE) << 14 | data[17] << 7 |
(data[18] & 0xFE) >> 1;
(int64_t) (data[14] & 0x0E) << 29 | data[15] << 22 | (data[16] & 0xFE) << 14 | data[17] <<
7 | (data[18] & 0xFE) >> 1;
pesdx->DTS = dts;
Debug(4, "pesdemux: pts %#012" PRIx64 " %#012" PRIx64 "\n", pts, dts);
}
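
The shift/mask expressions above, like the similar ones in PlayAudio() and PlayVideo3() further down, all rebuild the standard 33-bit PES timestamps (PTS at data[9..13], DTS at data[14..18]) from five header bytes with interleaved marker bits. As a reading aid only, not part of this patch, the layout can be expressed as:

    #include <stdint.h>

    // Reassemble a 33-bit PES timestamp from the five bytes that carry it,
    // skipping the marker bits; 'p' points at the first of the five bytes,
    // e.g. &data[9] for the PTS and &data[14] for the DTS.
    static inline int64_t PesTimestamp(const uint8_t *p)
    {
        return (int64_t)(p[0] & 0x0E) << 29
            | p[1] << 22
            | (p[2] & 0xFE) << 14
            | p[3] << 7
            | (p[4] & 0xFE) >> 1;
    }
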
@ -831,31 +826,24 @@ static void PesParse(PesDemux * pesdx, const uint8_t * data, int size,
if (AudioCodecID != AV_CODEC_ID_PCM_DVD) {
q = pesdx->Header;
Debug(3, "pesdemux: LPCM %d sr:%d bits:%d chan:%d\n",
q[0], q[5] >> 4, (((q[5] >> 6) & 0x3) + 4) * 4,
(q[5] & 0x7) + 1);
Debug(3, "pesdemux: LPCM %d sr:%d bits:%d chan:%d\n", q[0], q[5] >> 4,
(((q[5] >> 6) & 0x3) + 4) * 4, (q[5] & 0x7) + 1);
CodecAudioClose(MyAudioDecoder);
bits_per_sample = (((q[5] >> 6) & 0x3) + 4) * 4;
if (bits_per_sample != 16) {
Error(_
("softhddev: LPCM %d bits per sample aren't supported\n"),
bits_per_sample);
Error(_("softhddev: LPCM %d bits per sample aren't supported\n"), bits_per_sample);
// FIXME: handle unsupported formats.
}
samplerate = samplerates[q[5] >> 4];
channels = (q[5] & 0x7) + 1;
AudioSetup(&samplerate, &channels, 0);
if (samplerate != samplerates[q[5] >> 4]) {
Error(_
("softhddev: LPCM %d sample-rate is unsupported\n"),
samplerates[q[5] >> 4]);
Error(_("softhddev: LPCM %d sample-rate is unsupported\n"), samplerates[q[5] >> 4]);
// FIXME: support resample
}
if (channels != (q[5] & 0x7) + 1) {
Error(_
("softhddev: LPCM %d channels are unsupported\n"),
(q[5] & 0x7) + 1);
Error(_("softhddev: LPCM %d channels are unsupported\n"), (q[5] & 0x7) + 1);
// FIXME: support resample
}
//CodecAudioOpen(MyAudioDecoder, AV_CODEC_ID_PCM_DVD);
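
The LPCM branch above decodes everything it needs from one header byte (q[5], respectively p[5] in PlayAudio()): the two top bits select the bits per sample, the next nibble indexes the samplerates[] table defined elsewhere in this file, and the low three bits store the channel count minus one. A self-contained sketch of the bit fields actually used (the sample-rate table itself is not repeated here):

    #include <stdint.h>

    // Bits-per-sample field of the private-stream-1 LPCM header byte:
    // values 0..3 map to 16, 20, 24 and 28 bits.
    static int LpcmBitsPerSample(uint8_t b)
    {
        return (((b >> 6) & 0x3) + 4) * 4;
    }

    // Channel count is stored as "channels - 1" in the low three bits.
    static int LpcmChannels(uint8_t b)
    {
        return (b & 0x7) + 1;
    }
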
@ -949,8 +937,7 @@ static int TsDemuxer(TsDemux * tsdx, const uint8_t * data, int size)
}
#ifdef DEBUG
pid = (p[1] & 0x1F) << 8 | p[2];
Debug(4, "tsdemux: PID: %#04x%s%s\n", pid, p[1] & 0x40 ? " start" : "",
p[3] & 0x10 ? " payload" : "");
Debug(4, "tsdemux: PID: %#04x%s%s\n", pid, p[1] & 0x40 ? " start" : "", p[3] & 0x10 ? " payload" : "");
#endif
// skip adaptation field
switch (p[3] & 0x30) { // adaption field
@ -1035,8 +1022,7 @@ int PlayAudio(const uint8_t * data, int size, uint8_t id)
}
#ifdef USE_SOFTLIMIT
// soft limit buffer full
if (AudioSyncStream && VideoGetBuffers(AudioSyncStream) > 3
&& AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE * 2) {
if (AudioSyncStream && VideoGetBuffers(AudioSyncStream) > 3 && AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE * 2) {
return 0;
}
#endif
@ -1061,14 +1047,13 @@ int PlayAudio(const uint8_t * data, int size, uint8_t id)
if (data[7] & 0x80 && n >= 5) {
AudioAvPkt->pts =
(int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] &
0xFE) << 14 | data[12] << 7 | (data[13] & 0xFE) >> 1;
(int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] & 0xFE) << 14 | data[12] << 7 | (data[13] &
0xFE) >> 1;
//Debug(3, "audio: pts %#012" PRIx64 "\n", AudioAvPkt->pts);
}
if (0) { // dts is unused
if (data[7] & 0x40) {
AudioAvPkt->dts =
(int64_t) (data[14] & 0x0E) << 29 | data[15] << 22 | (data[16]
AudioAvPkt->dts = (int64_t) (data[14] & 0x0E) << 29 | data[15] << 22 | (data[16]
& 0xFE) << 14 | data[17] << 7 | (data[18] & 0xFE) >> 1;
Debug(3, "audio: dts %#012" PRIx64 "\n", AudioAvPkt->dts);
}
@ -1098,16 +1083,13 @@ int PlayAudio(const uint8_t * data, int size, uint8_t id)
int channels;
int bits_per_sample;
Debug(3, "[softhddev]%s: LPCM %d sr:%d bits:%d chan:%d\n",
__FUNCTION__, id, p[5] >> 4, (((p[5] >> 6) & 0x3) + 4) * 4,
(p[5] & 0x7) + 1);
Debug(3, "[softhddev]%s: LPCM %d sr:%d bits:%d chan:%d\n", __FUNCTION__, id, p[5] >> 4,
(((p[5] >> 6) & 0x3) + 4) * 4, (p[5] & 0x7) + 1);
CodecAudioClose(MyAudioDecoder);
bits_per_sample = (((p[5] >> 6) & 0x3) + 4) * 4;
if (bits_per_sample != 16) {
Error(_
("[softhddev] LPCM %d bits per sample aren't supported\n"),
bits_per_sample);
Error(_("[softhddev] LPCM %d bits per sample aren't supported\n"), bits_per_sample);
// FIXME: handle unsupported formats.
}
samplerate = samplerates[p[5] >> 4];
@ -1117,13 +1099,11 @@ int PlayAudio(const uint8_t * data, int size, uint8_t id)
AudioSetBufferTime(400);
AudioSetup(&samplerate, &channels, 0);
if (samplerate != samplerates[p[5] >> 4]) {
Error(_("[softhddev] LPCM %d sample-rate is unsupported\n"),
samplerates[p[5] >> 4]);
Error(_("[softhddev] LPCM %d sample-rate is unsupported\n"), samplerates[p[5] >> 4]);
// FIXME: support resample
}
if (channels != (p[5] & 0x7) + 1) {
Error(_("[softhddev] LPCM %d channels are unsupported\n"),
(p[5] & 0x7) + 1);
Error(_("[softhddev] LPCM %d channels are unsupported\n"), (p[5] & 0x7) + 1);
// FIXME: support resample
}
//CodecAudioOpen(MyAudioDecoder, AV_CODEC_ID_PCM_DVD);
@ -1268,8 +1248,7 @@ int PlayTsAudio(const uint8_t * data, int size)
}
#ifdef USE_SOFTLIMIT
// soft limit buffer full
if (AudioSyncStream && VideoGetBuffers(AudioSyncStream) > 3
&& AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE * 2) {
if (AudioSyncStream && VideoGetBuffers(AudioSyncStream) > 3 && AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE * 2) {
return 0;
}
#endif
@ -1411,8 +1390,7 @@ static void VideoPacketExit(VideoStream * stream)
** @param data data of pes packet
** @param size size of pes packet
*/
static void VideoEnqueue(VideoStream * stream, int64_t pts, int64_t dts, const void *data,
int size)
static void VideoEnqueue(VideoStream * stream, int64_t pts, int64_t dts, const void *data, int size)
{
AVPacket *avpkt;
@ -1528,8 +1506,7 @@ static void VideoNextPacket(VideoStream * stream, int codec_id)
** @param data data of pes packet
** @param size size of pes packet
*/
static void VideoMpegEnqueue(VideoStream * stream, int64_t pts, int64_t dts,
const uint8_t * data, int size)
static void VideoMpegEnqueue(VideoStream * stream, int64_t pts, int64_t dts, const uint8_t * data, int size)
{
static const char startcode[3] = { 0x00, 0x00, 0x01 };
const uint8_t *p;
@ -1725,8 +1702,8 @@ static void FixPacketForFFMpeg(VideoDecoder * vdecoder, AVPacket * avpkt)
tmp->size = p - tmp->data;
#if STILL_DEBUG>1
if (InStillPicture) {
fprintf(stderr, "\nfix:%9d,%02x %02x %02x %02x\n", tmp->size,
tmp->data[0], tmp->data[1], tmp->data[2], tmp->data[3]);
fprintf(stderr, "\nfix:%9d,%02x %02x %02x %02x\n", tmp->size, tmp->data[0], tmp->data[1], tmp->data[2],
tmp->data[3]);
}
#endif
CodecVideoDecode(vdecoder, tmp);
@ -1742,15 +1719,14 @@ static void FixPacketForFFMpeg(VideoDecoder * vdecoder, AVPacket * avpkt)
#if STILL_DEBUG>1
if (InStillPicture) {
fprintf(stderr, "\nfix:%9d.%02x %02x %02x %02x\n", tmp->size,
tmp->data[0], tmp->data[1], tmp->data[2], tmp->data[3]);
fprintf(stderr, "\nfix:%9d.%02x %02x %02x %02x\n", tmp->size, tmp->data[0], tmp->data[1], tmp->data[2],
tmp->data[3]);
}
#endif
CodecVideoDecode(vdecoder, tmp);
}
#endif
/**
** Open video stream.
**
@ -1782,6 +1758,7 @@ static void VideoStreamClose(VideoStream * stream, int delhw)
stream->SkipStream = 1;
if (stream->Decoder) {
VideoDecoder *decoder;
Debug(3, "VideoStreamClose");
decoder = stream->Decoder;
// FIXME: remove this lock for main stream close
@ -1908,8 +1885,7 @@ int VideoDecodeInput(VideoStream * stream)
if (f) {
Debug(3, "video: cleared upto close\n");
atomic_sub(f, &stream->PacketsFilled);
stream->PacketRead =
(stream->PacketRead + f) % VIDEO_PACKET_MAX;
stream->PacketRead = (stream->PacketRead + f) % VIDEO_PACKET_MAX;
stream->ClearClose = 0;
}
break;
@ -2128,8 +2104,7 @@ static int ValidateMpeg(const uint8_t * data, int size)
return -1;
}
if (data[0] || data[1] || data[2] != 0x01) {
printf("%02x: %02x %02x %02x %02x %02x\n", data[-1], data[0],
data[1], data[2], data[3], data[4]);
printf("%02x: %02x %02x %02x %02x %02x\n", data[-1], data[0], data[1], data[2], data[3], data[4]);
return -1;
}
@ -2167,6 +2142,7 @@ int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
int n;
int z;
int l;
if (!stream->Decoder) { // no x11 video started
return size;
}
@ -2197,8 +2173,7 @@ int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
}
if (stream->InvalidPesCounter) {
if (stream->InvalidPesCounter > 1) {
Error(_("[softhddev] %d invalid PES video packet(s)\n"),
stream->InvalidPesCounter);
Error(_("[softhddev] %d invalid PES video packet(s)\n"), stream->InvalidPesCounter);
}
stream->InvalidPesCounter = 0;
}
@ -2223,7 +2198,8 @@ int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
}
#ifdef USE_SOFTLIMIT
// soft limit buffer full
if (AudioSyncStream == stream && atomic_read(&stream->PacketsFilled) > 3 && AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE * 2) {
if (AudioSyncStream == stream && atomic_read(&stream->PacketsFilled) > 3
&& AudioUsedBytes() > AUDIO_MIN_BUFFER_FREE * 2) {
return 0;
}
#endif
@ -2231,14 +2207,17 @@ int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
pts = AV_NOPTS_VALUE;
dts = AV_NOPTS_VALUE;
if ((data[7] & 0xc0) == 0x80) {
pts = (int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] &
0xFE) << 14 | data[12] << 7 | (data[13] & 0xFE) >> 1;
pts =
(int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] & 0xFE) << 14 | data[12] << 7 | (data[13] &
0xFE) >> 1;
}
if ((data[7] & 0xC0) == 0xc0) {
pts = (int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] &
0xFE) << 14 | data[12] << 7 | (data[13] & 0xFE) >> 1;
dts = (int64_t) (data[14] & 0x0E) << 29 | data[15] << 22 | (data[16] &
0xFE) << 14 | data[17] << 7 | (data[18] & 0xFE) >> 1;
pts =
(int64_t) (data[9] & 0x0E) << 29 | data[10] << 22 | (data[11] & 0xFE) << 14 | data[12] << 7 | (data[13] &
0xFE) >> 1;
dts =
(int64_t) (data[14] & 0x0E) << 29 | data[15] << 22 | (data[16] & 0xFE) << 14 | data[17] << 7 | (data[18] &
0xFE) >> 1;
}
check = data + 9 + n;
@ -2266,11 +2245,8 @@ int PlayVideo3(VideoStream * stream, const uint8_t * data, int size)
int fd;
static int FrameCounter;
snprintf(buf, sizeof(buf), "frame_%06d_%08d.raw", getpid(),
FrameCounter++);
if ((fd =
open(buf, O_WRONLY | O_CLOEXEC | O_CREAT | O_TRUNC,
0666)) >= 0) {
snprintf(buf, sizeof(buf), "frame_%06d_%08d.raw", getpid(), FrameCounter++);
if ((fd = open(buf, O_WRONLY | O_CLOEXEC | O_CREAT | O_TRUNC, 0666)) >= 0) {
if (write(fd, data + 9 + n, size - 9 - n)) {
// this construct is to remove the annoying warning
}
@ -2413,8 +2389,7 @@ extern uint8_t *CreateJpeg(uint8_t *, int *, int, int, int);
**
** @returns allocated jpeg image.
*/
uint8_t *CreateJpeg(uint8_t * image, int raw_size, int *size, int quality,
int width, int height)
uint8_t *CreateJpeg(uint8_t * image, int raw_size, int *size, int quality, int width, int height)
{
struct jpeg_compress_struct cinfo;
struct jpeg_error_mgr jerr;
@ -2569,8 +2544,7 @@ void GetVideoSize(int *width, int *height, double *aspect)
int aspect_den;
if (MyVideoStream->HwDecoder) {
VideoGetVideoSize(MyVideoStream->HwDecoder, width, height, &aspect_num,
&aspect_den);
VideoGetVideoSize(MyVideoStream->HwDecoder, width, height, &aspect_num, &aspect_den);
*aspect = (double)aspect_num / (double)aspect_den;
} else {
*width = 0;
@ -2580,8 +2554,7 @@ void GetVideoSize(int *width, int *height, double *aspect)
#ifdef DEBUG
if (done_width != *width || done_height != *height) {
Debug(3, "[softhddev]%s: %dx%d %g\n", __FUNCTION__, *width, *height,
*aspect);
Debug(3, "[softhddev]%s: %dx%d %g\n", __FUNCTION__, *width, *height, *aspect);
done_width = *width;
done_height = *height;
}
@ -2629,8 +2602,7 @@ void Clear(void)
for (i = 0; MyVideoStream->ClearBuffers && i < 20; ++i) {
usleep(1 * 100);
}
Debug(3, "[softhddev]%s: %dms buffers %d\n", __FUNCTION__, i,
VideoGetBuffers(MyVideoStream));
Debug(3, "[softhddev]%s: %dms buffers %d\n", __FUNCTION__, i, VideoGetBuffers(MyVideoStream));
}
/**
@ -2695,7 +2667,6 @@ void StillPicture(const uint8_t * data, int size)
VideoNextPacket(MyVideoStream, AV_CODEC_ID_NONE); // close last stream
if (MyVideoStream->CodecID == AV_CODEC_ID_NONE) {
// FIXME: should detect codec, see PlayVideo
Error(_("[softhddev] no codec known for still picture\n"));
@ -2802,8 +2773,7 @@ int Poll(int timeout)
filled = atomic_read(&MyVideoStream->PacketsFilled);
// soft limit + hard limit
full = (used > AUDIO_MIN_BUFFER_FREE && filled > 3)
|| AudioFreeBytes() < AUDIO_MIN_BUFFER_FREE
|| filled >= VIDEO_PACKET_MAX - 10;
|| AudioFreeBytes() < AUDIO_MIN_BUFFER_FREE || filled >= VIDEO_PACKET_MAX - 10;
if (!full || !timeout) {
return !full;
@ -2857,8 +2827,7 @@ void GetOsdSize(int *width, int *height, double *aspect)
#ifdef DEBUG
if (done_width != *width || done_height != *height) {
Debug(3, "[softhddev]%s: %dx%d %g\n", __FUNCTION__, *width, *height,
*aspect);
Debug(3, "[softhddev]%s: %dx%d %g\n", __FUNCTION__, *width, *height, *aspect);
done_width = *width;
done_height = *height;
}
@ -2885,8 +2854,7 @@ void OsdClose(void)
** @param x x-coordinate on screen of argb image
** @param y y-coordinate on screen of argb image
*/
void OsdDrawARGB(int xi, int yi, int height, int width, int pitch,
const uint8_t * argb, int x, int y)
void OsdDrawARGB(int xi, int yi, int height, int width, int pitch, const uint8_t * argb, int x, int y)
{
// wakeup display for showing remote learning dialog
VideoDisplayWakeup();
@ -2902,15 +2870,11 @@ const char *CommandLineHelp(void)
{
return " -a device\taudio device (fe. alsa: hw:0,0 oss: /dev/dsp)\n"
" -p device\taudio device for pass-through (hw:0,1 or /dev/dsp1)\n"
" -c channel\taudio mixer channel name (fe. PCM)\n"
" -d display\tdisplay of x11 server (fe. :0.0)\n"
" -c channel\taudio mixer channel name (fe. PCM)\n" " -d display\tdisplay of x11 server (fe. :0.0)\n"
" -f\t\tstart with fullscreen window (only with window manager)\n"
" -g geometry\tx11 window geometry wxh+x+y\n"
" -v device\tvideo driver device (cuvid)\n"
" -s\t\tstart in suspended mode\n"
" -x\t\tstart x11 server, with -xx try to connect, if this fails\n"
" -X args\tX11 server arguments (f.e. -nocursor)\n"
" -w workaround\tenable/disable workarounds\n"
" -g geometry\tx11 window geometry wxh+x+y\n" " -v device\tvideo driver device (cuvid)\n"
" -s\t\tstart in suspended mode\n" " -x\t\tstart x11 server, with -xx try to connect, if this fails\n"
" -X args\tX11 server arguments (f.e. -nocursor)\n" " -w workaround\tenable/disable workarounds\n"
"\tno-hw-decoder\t\tdisable hw decoder, use software decoder only\n"
"\tno-mpeg-hw-decoder\tdisable hw decoder for mpeg only\n"
"\tstill-hw-decoder\tenable hardware decoder for still-pictures\n"
@ -2919,8 +2883,7 @@ const char *CommandLineHelp(void)
"\talsa-no-close-open\tdisable close open to fix alsa no sound bug\n"
"\talsa-close-open-delay\tenable close open delay to fix no sound bug\n"
"\tignore-repeat-pict\tdisable repeat pict message\n"
"\tuse-possible-defect-frames prefer faster channel switch\n"
" -D\t\tstart in detached mode\n";
"\tuse-possible-defect-frames prefer faster channel switch\n" " -D\t\tstart in detached mode\n";
}
/**
@ -2961,8 +2924,7 @@ int ProcessArgs(int argc, char *const argv[])
case 'g': // geometry
if (VideoSetGeometry(optarg) < 0) {
fprintf(stderr,
_
("Bad formated geometry please use: [=][<width>{xX}<height>][{+-}<xoffset>{+-}<yoffset>]\n"));
_("Bad formated geometry please use: [=][<width>{xX}<height>][{+-}<xoffset>{+-}<yoffset>]\n"));
return 0;
}
continue;
@ -3004,8 +2966,7 @@ int ProcessArgs(int argc, char *const argv[])
} else if (!strcasecmp("use-possible-defect-frames", optarg)) {
CodecUsePossibleDefectFrames = 1;
} else {
fprintf(stderr, _("Workaround '%s' unsupported\n"),
optarg);
fprintf(stderr, _("Workaround '%s' unsupported\n"), optarg);
return 0;
}
continue;
@ -3015,8 +2976,7 @@ int ProcessArgs(int argc, char *const argv[])
fprintf(stderr, _("We need no long options\n"));
return 0;
case ':':
fprintf(stderr, _("Missing argument for option '%c'\n"),
optopt);
fprintf(stderr, _("Missing argument for option '%c'\n"), optopt);
return 0;
default:
fprintf(stderr, _("Unknown option '%c'\n"), optopt);
@ -3116,8 +3076,7 @@ static void StartXServer(void)
usr1.sa_handler = Usr1Handler;
sigaction(SIGUSR1, &usr1, NULL);
Debug(3, "x-setup: Starting X server '%s' '%s'\n", args[0],
X11ServerArguments);
Debug(3, "x-setup: Starting X server '%s' '%s'\n", args[0], X11ServerArguments);
// fork
if ((pid = fork())) { // parent
@ -3189,12 +3148,10 @@ void SoftHdDeviceExit(void)
kill(X11ServerPid, SIGKILL);
} while (waittime < timeout);
if (wpid && WIFEXITED(status)) {
Debug(3, "x-setup: x11 server exited (%d)\n",
WEXITSTATUS(status));
Debug(3, "x-setup: x11 server exited (%d)\n", WEXITSTATUS(status));
}
if (wpid && WIFSIGNALED(status)) {
Debug(3, "x-setup: x11 server killed (%d)\n",
WTERMSIG(status));
Debug(3, "x-setup: x11 server killed (%d)\n", WTERMSIG(status));
}
}
}
@ -3247,8 +3204,7 @@ int Start(void)
PesInit(PesDemuxAudio);
#endif
Info(_("[softhddev] ready%s\n"),
ConfigStartSuspended ? ConfigStartSuspended ==
-1 ? " detached" : " suspended" : "");
ConfigStartSuspended ? ConfigStartSuspended == -1 ? " detached" : " suspended" : "");
return ConfigStartSuspended;
}
@ -3281,12 +3237,10 @@ void Housekeeping(void)
wpid = waitpid(X11ServerPid, &status, WNOHANG);
if (wpid) {
if (WIFEXITED(status)) {
Debug(3, "x-setup: x11 server exited (%d)\n",
WEXITSTATUS(status));
Debug(3, "x-setup: x11 server exited (%d)\n", WEXITSTATUS(status));
}
if (WIFSIGNALED(status)) {
Debug(3, "x-setup: x11 server killed (%d)\n",
WTERMSIG(status));
Debug(3, "x-setup: x11 server killed (%d)\n", WTERMSIG(status));
}
X11ServerPid = 0;
// video not running
@ -3452,8 +3406,7 @@ void ScaleVideo(int x, int y, int width, int height)
** @param pip_width pip window width OSD relative
** @param pip_height pip window height OSD relative
*/
void PipSetPosition(int x, int y, int width, int height, int pip_x, int pip_y,
int pip_width, int pip_height)
void PipSetPosition(int x, int y, int width, int height, int pip_x, int pip_y, int pip_width, int pip_height)
{
if (!MyVideoStream->HwDecoder) { // video not running
return;
@ -3463,8 +3416,7 @@ void PipSetPosition(int x, int y, int width, int height, int pip_x, int pip_y,
if (!PipVideoStream->HwDecoder) { // pip not running
return;
}
VideoSetOutputPosition(PipVideoStream->HwDecoder, pip_x, pip_y, pip_width,
pip_height);
VideoSetOutputPosition(PipVideoStream->HwDecoder, pip_x, pip_y, pip_width, pip_height);
}
/**
@ -3479,8 +3431,7 @@ void PipSetPosition(int x, int y, int width, int height, int pip_x, int pip_y,
** @param pip_width pip window width OSD relative
** @param pip_height pip window height OSD relative
*/
void PipStart(int x, int y, int width, int height, int pip_x, int pip_y,
int pip_width, int pip_height)
void PipStart(int x, int y, int width, int height, int pip_x, int pip_y, int pip_width, int pip_height)
{
if (!MyVideoStream->HwDecoder) { // video not running
return;
View File
@ -25,8 +25,7 @@ extern "C"
{
#endif
/// C callback feed key press
extern void FeedKeyPress(const char *, const char *, int, int,
const char *);
extern void FeedKeyPress(const char *, const char *, int, int, const char *);
/// C plugin get osd size and aspect
extern void GetOsdSize(int *, int *, double *);
@ -34,8 +33,7 @@ extern "C"
/// C plugin close osd
extern void OsdClose(void);
/// C plugin draw osd pixmap
extern void OsdDrawARGB(int, int, int, int, int, const uint8_t *, int,
int);
extern void OsdDrawARGB(int, int, int, int, int, const uint8_t *, int, int);
/// C plugin play audio packet
extern int PlayAudio(const uint8_t *, int, uint8_t);
616
video.c

File diff suppressed because it is too large

13
video.h
View File
@ -62,20 +62,17 @@ extern unsigned VideoGetSurface(VideoHwDecoder *, const AVCodecContext *);
extern void VideoReleaseSurface(VideoHwDecoder *, unsigned);
/// Callback to negotiate the PixelFormat.
extern enum AVPixelFormat Video_get_format(VideoHwDecoder *, AVCodecContext *,
const enum AVPixelFormat *);
extern enum AVPixelFormat Video_get_format(VideoHwDecoder *, AVCodecContext *, const enum AVPixelFormat *);
/// Render a ffmpeg frame.
extern void VideoRenderFrame(VideoHwDecoder *, const AVCodecContext *,
const AVFrame *);
extern void VideoRenderFrame(VideoHwDecoder *, const AVCodecContext *, const AVFrame *);
/// Get hwaccel context for ffmpeg.
extern void *VideoGetHwAccelContext(VideoHwDecoder *);
#ifdef AVCODEC_VDPAU_H
/// Draw vdpau render state.
extern void VideoDrawRenderState(VideoHwDecoder *,
struct vdpau_render_state *);
extern void VideoDrawRenderState(VideoHwDecoder *, struct vdpau_render_state *);
#endif
#ifdef USE_OPENGLOSD
@ -189,8 +186,7 @@ extern void VideoSetAutoCrop(int, int, int);
extern void VideoOsdClear(void);
/// Draw an OSD ARGB image.
extern void VideoOsdDrawARGB(int, int, int, int, int, const uint8_t *, int,
int);
extern void VideoOsdDrawARGB(int, int, int, int, int, const uint8_t *, int, int);
/// Get OSD size.
extern void VideoGetOsdSize(int *, int *);
@ -248,6 +244,7 @@ extern void SetDPMSatBlackScreen(int);
/// Raise the frontend window
extern int VideoRaiseWindow(void);
#ifdef USE_OPENGLOSD
extern void ActivateOsd(GLuint, int, int, int, int);
#endif