Video bug fix.

Made video_test work again.
Disabled the VA-API Intel vaAssociateSubpicture workaround.
Fix bug: must release the lock for VideoPollEvent.
Allow faster video and audio sync.
Fix bug: software decoder used vaPutImage with the Intel backend.
Fix bug: artefacts are shown after an MPEG-2 channel switch.
Fix bug: VideoReleaseSurface called after VideoExit.
Johns 2012-01-27 21:08:37 +01:00
parent bcf6ecabc1
commit f8d198636b
8 changed files with 275 additions and 138 deletions

ChangeLog

@@ -1,6 +1,13 @@
 User johns
 Date:
 
+Made video_test working again.
+Disabled VA-API Intel vaAssociateSubpicture workaround.
+Fix bug: Must release lock for VideoPollEvent.
+Allow faster video and audio sync.
+Fix bug: Software decoder use vaPutImage with intel backend.
+Fix bug: Artefacts are shown after mpeg2 channel switch.
+Fix bug: VideoReleaseSurface called after VideoExit.
 Support external players.
 Add VDPAU display preemption support.

Makefile

@@ -18,7 +18,7 @@ GIT_REV = $(shell git describe --always 2>/dev/null)
 ### Configuration (edit this for your needs)
 
-CONFIG := #-DDEBUG
+CONFIG := -DDEBUG
 #CONFIG += -DHAVE_PTHREAD_NAME
 CONFIG += $(shell pkg-config --exists vdpau && echo "-DUSE_VDPAU")
 CONFIG += $(shell pkg-config --exists libva && echo "-DUSE_VAAPI")

@@ -174,6 +174,6 @@ indent:
 	indent $$i; unexpand -a $$i > $$i.up; mv $$i.up $$i; \
 	done
 
-video_test: video.c
+video_test: video.c Makefile
 	$(CC) -DVIDEO_TEST -DVERSION='"$(VERSION)"' $(CFLAGS) $(LDFLAGS) $< $(LIBS) \
 	    -o $@

Todo

@@ -44,7 +44,7 @@ vdpau:
 libva:
     yaepghd (VaapiSetOutputPosition) support
-    can associate ony displayed part of osd
+    can associate only displayed part of osd
     grab image for va-api
     still many:
 	[drm:i915_hangcheck_elapsed] *ERROR* Hangcheck timer elapsed... GPU hung

@@ -56,6 +56,7 @@ libva: branch vaapi-ext
 libva-intel-driver:
     1080i does no v-sync (sometimes correct working with vaapi-ext)
     OSD has sometimes wrong size (workaround written)
+    software decoder needs UV swab
 libva-vdpau-driver:
     G210/GT520 OSD update too slow (needs hardware problem workaround)

audio.c

@@ -137,7 +137,8 @@ static unsigned AudioSampleRate;	///< audio sample rate in hz
 static unsigned AudioChannels;		///< number of audio channels
 static const int AudioBytesProSample = 2;	///< number of bytes per sample
 static int64_t AudioPTS;		///< audio pts clock
-static const int AudioBufferTime = 350;	///< audio buffer time in ms
+static const int AudioBufferTime = 300;	///< audio buffer time in ms
+static int AudioMoreBufferTime = 1;	///< increase buffer time
 
 #ifdef USE_AUDIO_THREAD
 static pthread_t AudioThread;		///< audio play thread

@@ -1086,11 +1087,13 @@ static int AlsaSetup(int *freq, int *channels, int use_ac3)
     AlsaStartThreshold = snd_pcm_frames_to_bytes(AlsaPCMHandle, period_size);
     // buffer time/delay in ms
     if (AlsaStartThreshold <
-	(*freq * *channels * AudioBytesProSample * AudioBufferTime) / 1000U) {
+	(*freq * *channels * AudioBytesProSample * AudioMoreBufferTime *
+	    AudioBufferTime) / 1000U) {
 	AlsaStartThreshold =
-	    (*freq * *channels * AudioBytesProSample * AudioBufferTime) /
-	    1000U;
+	    (*freq * *channels * AudioBytesProSample * AudioMoreBufferTime *
+	    AudioBufferTime) / 1000U;
     }
+    AudioMoreBufferTime = 1;
     // no bigger, than the buffer
     if (AlsaStartThreshold > RingBufferFreeBytes(AlsaRingBuffer)) {
 	AlsaStartThreshold = RingBufferFreeBytes(AlsaRingBuffer);

@@ -1709,12 +1712,13 @@ static int OssSetup(int *freq, int *channels, int use_ac3)
     OssStartThreshold = bi.bytes + tmp;
     // buffer time/delay in ms
     if (OssStartThreshold <
-	(*freq * *channels * AudioBytesProSample * AudioBufferTime) /
-	1000U) {
+	(*freq * *channels * AudioBytesProSample * AudioMoreBufferTime *
+	AudioBufferTime) / 1000U) {
 	OssStartThreshold =
-	    (*freq * *channels * AudioBytesProSample * AudioBufferTime) /
-	    1000U;
+	    (*freq * *channels * AudioBytesProSample *
+	    AudioMoreBufferTime * AudioBufferTime) / 1000U;
     }
+    AudioMoreBufferTime = 1;
     // no bigger, than the buffer
     if (OssStartThreshold > RingBufferFreeBytes(OssRingBuffer)) {
 	OssStartThreshold = RingBufferFreeBytes(OssRingBuffer);

@@ -2097,6 +2101,16 @@ int AudioSetup(int *freq, int *channels, int use_ac3)
     return AudioUsedModule->Setup(freq, channels, use_ac3);
 }
 
+/**
+**	Increase audio buffer time.
+**
+**	Some channels need a bigger audio buffer to buffer video.
+*/
+void AudioIncreaseBufferTime(void)
+{
+    AudioMoreBufferTime = 4;
+}
+
 /**
 **	Set pcm audio device.
 **
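
The start threshold is plain arithmetic on the stream parameters. A minimal standalone sketch with assumed example values (48 kHz stereo, 16-bit samples; not taken from the diff) shows what the new defaults work out to:

    #include <stdio.h>

    int main(void)
    {
        // assumed example values: 48 kHz, 2 channels, 16-bit samples
        unsigned freq = 48000;
        unsigned channels = 2;
        unsigned bytes_pro_sample = 2;      // AudioBytesProSample
        unsigned more = 1;                  // AudioMoreBufferTime multiplier
        unsigned buffer_time = 300;         // AudioBufferTime in ms

        unsigned threshold =
            (freq * channels * bytes_pro_sample * more * buffer_time) / 1000;
        printf("start threshold: %u bytes (~%u ms)\n",
            threshold, more * buffer_time);
        // prints 57600 bytes; after AudioIncreaseBufferTime() the multiplier
        // becomes 4 (230400 bytes, ~1.2 s) until the next setup resets it to 1
        return 0;
    }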

audio.h

@@ -42,6 +42,8 @@ extern int AudioSetup(int *, int *, int);	///< setup audio output
 //extern void AudioPlay(void);		///< play audio
 //extern void AudioPause(void);		///< pause audio
 
+extern void AudioIncreaseBufferTime(void);	///< use bigger buffer
+
 extern void AudioSetDevice(const char *);	///< set PCM audio device
 extern void AudioSetDeviceAC3(const char *);	///< set Passthrough device
 extern void AudioInit(void);		///< setup audio module

codec.c

@@ -561,13 +561,13 @@ void CodecVideoDecode(VideoDecoder * decoder, const AVPacket * avpkt)
 	    video_ctx->frame_number, used);
     }
     if (used != pkt->size) {
-	if (used >= 0) {
+	if (used >= 0 && used < pkt->size) {
 	    // some tv channels, produce this
 	    Debug(4,
 		"codec: ooops didn't use complete video packet used %d of %d\n",
 		used, pkt->size);
-	    pkt->data += used;
 	    pkt->size -= used;
+	    pkt->data += used;
 	    goto next_part;
 	}
 	Debug(3, "codec: bad frame %d\n", used);

softhddev.c

@@ -124,6 +124,8 @@ static const uint16_t SampleRateTable[4] = {
 **	FrameLengthInBytes = (12 * BitRate / SampleRate + Padding) * 4
 **	Layer II & III:
 **	FrameLengthInBytes = 144 * BitRate / SampleRate + Padding
+**
+**	@todo sometimes detects wrong position
 */
 static int FindAudioSync(const AVPacket * avpkt)
 {
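
As a quick check of the Layer II/III formula quoted in the comment: a 48 kHz, 192 kbit/s stream without padding gives 144 * 192000 / 48000 = 576 bytes per frame, so FindAudioSync expects the next sync word 576 bytes after the current one (rates are an assumed example, not from the diff); the new @todo records that a false match on payload bytes is still possible.
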
@@ -289,17 +291,18 @@ int PlayAudio(const uint8_t * data, int size,
 	    return osize;
 	}
+	avpkt->pts = AV_NOPTS_VALUE;
+	AudioIncreaseBufferTime();
 	CodecAudioOpen(MyAudioDecoder, NULL, CODEC_ID_MP2);
 	AudioCodecID = CODEC_ID_MP2;
 	data += n;
 	size -= n;
-    }
 	// no decoder or codec known
 	if (AudioCodecID == CODEC_ID_NONE) {
 	    return osize;
 	}
+    }
 
     avpkt->data = (void *)data;
     avpkt->size = size;
@@ -350,7 +353,7 @@ static volatile char Usr1Signal;	///< true got usr1 signal
 static AVPacket VideoPacketRb[VIDEO_PACKET_MAX];
 static int VideoPacketWrite;		///< write pointer
 static int VideoPacketRead;		///< read pointer
-static atomic_t VideoPacketsFilled;	///< how many of the buffer is used
+atomic_t VideoPacketsFilled;		///< how many of the buffer is used
 static volatile char VideoClearBuffers;	///< clear video buffers
 static volatile char SkipVideo;		///< skip video

@@ -599,6 +602,7 @@ static void StopVideo(void)
     VideoOsdExit();
     VideoExit();
     if (MyVideoDecoder) {
+	// FIXME: this can crash, hw decoder released by video exit
 	CodecVideoClose(MyVideoDecoder);
 	CodecVideoDelDecoder(MyVideoDecoder);
 	MyVideoDecoder = NULL;

@@ -791,8 +795,7 @@ int PlayVideo(const uint8_t * data, int size)
 	return size;
     }
     // FIXME: incomplete packets produce artefacts after channel switch
-    if (atomic_read(&VideoPacketsFilled)
-	&& VideoCodecID == CODEC_ID_MPEG2VIDEO) {
+    if (0 && VideoCodecID == CODEC_ID_MPEG2VIDEO) {
 	// mpeg codec supports incomplete packets
 	// waiting for a full complete packages, increases needed delays
 	VideoNextPacket(CODEC_ID_MPEG2VIDEO);

video.c

@@ -297,6 +297,7 @@ static xcb_atom_t NetWmState;	///< wm-state message atom
 static xcb_atom_t NetWmStateFullscreen;	///< fullscreen wm-state message atom
 
 extern uint32_t VideoSwitch;		///< ticks for channel switch
+extern atomic_t VideoPacketsFilled;	///< how many of the buffer is used
 
 #ifdef USE_VIDEO_THREAD

@@ -315,6 +316,8 @@ static int OsdDirtyY;	///< osd dirty area y
 static int OsdDirtyWidth;		///< osd dirty area width
 static int OsdDirtyHeight;		///< osd dirty area height
 
+static int64_t VideoDeltaPTS;		///< FIXME: fix pts
+
 //----------------------------------------------------------------------------
 //	Functions
 //----------------------------------------------------------------------------

@@ -1065,6 +1068,7 @@ struct _vaapi_decoder_
     VAImage DeintImages[5];		///< deinterlace image buffers
 
+    // FIXME: int PutSurface;		///< flag put surface ok
     VAImage Image[1];			///< image buffer to update surface
     struct vaapi_context VaapiContext[1];	///< ffmpeg VA-API context

@@ -1555,6 +1559,7 @@ static void VaapiCleanup(VaapiDecoder * decoder)
     }
 
     decoder->PTS = AV_NOPTS_VALUE;
+    VideoDeltaPTS = 0;
 }
 
 ///

@@ -1775,8 +1780,10 @@ static void VaapiUpdateOutput(VaapiDecoder * decoder)
     Debug(3, "video: aspect output %dx%d+%d+%d\n", decoder->OutputWidth,
 	decoder->OutputHeight, decoder->OutputX, decoder->OutputY);
 
+#ifdef USE_AUTOCROP
     decoder->AutoCrop->State = 0;
     decoder->AutoCrop->Count = 0;
+#endif
 }
 
 ///

@@ -1852,6 +1859,7 @@ static enum PixelFormat Vaapi_get_format(VaapiDecoder * decoder,
 	// create initial black surface and display
 	VaapiBlackSurface(decoder);
+	// cleanup last context
 	VaapiCleanup(decoder);
 
 	if (!VideoHardwareDecoder) {	// hardware disabled by config

@@ -2083,8 +2091,8 @@ static void VaapiPutSurfaceX11(VaapiDecoder * decoder, VASurfaceID surface,
 	    // video dst
 	    decoder->OutputX, decoder->OutputY, decoder->OutputWidth,
 	    decoder->OutputHeight, NULL, 0,
-	    type | decoder->SurfaceFlagsTable[decoder->Resolution])) !=
-	VA_STATUS_SUCCESS) {
+	    type | decoder->SurfaceFlagsTable[decoder->Resolution]))
+	!= VA_STATUS_SUCCESS) {
 	// switching video kills VdpPresentationQueueBlockUntilSurfaceIdle
 	Error(_("video/vaapi: vaPutSurface failed %d\n"), status);
     }

@@ -2619,7 +2627,7 @@ static void VaapiQueueSurface(VaapiDecoder * decoder, VASurfaceID surface,
 	    VaapiReleaseSurface(decoder, old);
 	}
     }
-#if 1
+#if 0
     // FIXME: intel seems to forget this, nvidia GT 210 has speed problems here
     if (VaapiBuggyIntel && VaOsdSubpicture != VA_INVALID_ID) {
 	// FIXME: associate only if osd is displayed
@@ -2711,6 +2719,47 @@ static void VaapiBlackSurface(VaapiDecoder * decoder)
     Debug(3, "video/vaapi: associate %08x\n", decoder->BlackSurface);
     // FIXME: check if intel forgets this also
 
+    if (0 && decoder->Image->image_id == VA_INVALID_ID) {
+	VAImageFormat format[1];
+	void *va_image_data;
+	int i;
+
+	printf("No image\n");
+	VaapiFindImageFormat(decoder, PIX_FMT_NV12, format);
+	if ((status =
+		vaDeriveImage(decoder->VaDisplay, decoder->BlackSurface,
+		    decoder->Image)) != VA_STATUS_SUCCESS) {
+	    Error(_("video/vaapi: vaDeriveImage failed %d\n"), status);
+	    if (vaCreateImage(VaDisplay, format, VideoWindowWidth,
+		    VideoWindowHeight,
+		    decoder->Image) != VA_STATUS_SUCCESS) {
+		Error(_("video/vaapi: can't create image!\n"));
+	    }
+	}
+	if (vaMapBuffer(VaDisplay, decoder->Image->buf, &va_image_data)
+	    != VA_STATUS_SUCCESS) {
+	    Error(_("video/vaapi: can't map the image!\n"));
+	}
+	for (i = 0; (unsigned)i < decoder->Image->data_size; i += 2) {
+	    ((uint8_t *) va_image_data)[i + 0] = 0xFF;
+	    ((uint8_t *) va_image_data)[i + 1] = 0xFF;
+	}
+	if (vaUnmapBuffer(VaDisplay,
+		decoder->Image->buf) != VA_STATUS_SUCCESS) {
+	    Error(_("video/vaapi: can't unmap the image!\n"));
+	}
+    }
+    // FIXME: intel didn't support put image.
+    if (0
+	&& vaPutImage(VaDisplay, decoder->BlackSurface,
+	    decoder->Image->image_id, 0, 0, VideoWindowWidth,
+	    VideoWindowHeight, 0, 0, VideoWindowWidth, VideoWindowHeight)
+	!= VA_STATUS_SUCCESS) {
+	Error(_("video/vaapi: can't put image!\n"));
+    }
+
     start = GetMsTicks();
     if (vaSyncSurface(decoder->VaDisplay,
 	    decoder->BlackSurface) != VA_STATUS_SUCCESS) {
@@ -3271,6 +3320,7 @@ static void VaapiRenderFrame(VaapiDecoder * decoder,
     AVPicture picture[1];
     int width;
     int height;
+    int put_image;
 
     Debug(4, "video/vaapi: hw render sw surface\n");

@@ -3312,6 +3362,18 @@ static void VaapiRenderFrame(VaapiDecoder * decoder,
     // FIXME: Need to insert software deinterlace here
     // FIXME: can insert auto-crop here
 
+    // get a free surface and upload the image
+    surface = VaapiGetSurface(decoder);
+    Debug(4, "video/vaapi: video surface %#010x displayed\n", surface);
+
+    put_image = !VaapiBuggyIntel;
+    if (!put_image
+	&& (i =
+	    vaDeriveImage(decoder->VaDisplay, surface,
+		decoder->Image)) != VA_STATUS_SUCCESS) {
+	Error(_("video/vaapi: vaDeriveImage failed %d\n"), i);
+	put_image = 1;
+    }
     //
     //	Copy data from frame to image
     //

@@ -3319,6 +3381,16 @@ static void VaapiRenderFrame(VaapiDecoder * decoder,
 	!= VA_STATUS_SUCCESS) {
 	Error(_("video/vaapi: can't map the image!\n"));
     }
+    if (decoder->Image->format.fourcc == VA_FOURCC_NV12) {
+	static int warned;
+
+	// FIXME: intel NV12 convert YV12 to NV12
+	if (!warned) {
+	    warned = 1;
+	    Error(_("video/vaapi: FIXME: yv12->nv12 not written\n"));
+	}
+    } else {
 	// FIXME: I420 vs YV12
 	for (i = 0; (unsigned)i < decoder->Image->num_planes; ++i) {
 	    picture->data[i] = va_image_data + decoder->Image->offsets[i];

@@ -3327,20 +3399,21 @@ static void VaapiRenderFrame(VaapiDecoder * decoder,
 	av_picture_copy(picture, (AVPicture *) frame, video_ctx->pix_fmt,
 	    width, height);
+    }
 
     if (vaUnmapBuffer(VaDisplay, decoder->Image->buf) != VA_STATUS_SUCCESS) {
 	Error(_("video/vaapi: can't unmap the image!\n"));
     }
-    // get a free surface and upload the image
-    surface = VaapiGetSurface(decoder);
-    Debug(4, "video/vaapi: video surface %#010x displayed\n", surface);
 
     Debug(4, "video/vaapi: buffer %dx%d <- %dx%d\n", decoder->Image->width,
 	decoder->Image->height, width, height);
 
     // FIXME: intel didn't support put image.
-    if ((i = vaPutImage(VaDisplay, surface, decoder->Image->image_id, 0, 0,
-	    width, height, 0, 0, width, height)
-	) != VA_STATUS_SUCCESS) {
+    if (put_image
+	&& (i =
+	    vaPutImage(VaDisplay, surface, decoder->Image->image_id, 0, 0,
+		width, height, 0, 0, width,
+		height)) != VA_STATUS_SUCCESS) {
 	Error(_("video/vaapi: can't put image err:%d!\n"), i);
     }
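
The new put_image flag selects between two upload strategies: on the Intel backend (VaapiBuggyIntel) the surface memory is mapped directly through vaDeriveImage and the planes are copied in place, while everywhere else a separate VAImage is filled and blitted with vaPutImage. A rough standalone sketch of the two paths, with an assumed function name and the plane copies and most error handling trimmed:

    #include <va/va.h>

    // sketch only: upload one software-decoded frame into `surface`
    static VAStatus upload_frame(VADisplay dpy, VASurfaceID surface,
        VAImage *image, int buggy_intel)
    {
        void *data;

        if (buggy_intel) {
            // Intel path: map the surface memory directly, no extra blit
            if (vaDeriveImage(dpy, surface, image) != VA_STATUS_SUCCESS) {
                return VA_STATUS_ERROR_OPERATION_FAILED;
            }
            vaMapBuffer(dpy, image->buf, &data);
            // ... copy the decoded planes into `data` here ...
            vaUnmapBuffer(dpy, image->buf);
            return vaDestroyImage(dpy, image->image_id);
        }
        // generic path: fill a pre-created VAImage, then put it on the surface
        vaMapBuffer(dpy, image->buf, &data);
        // ... copy the decoded planes into `data` here ...
        vaUnmapBuffer(dpy, image->buf);
        return vaPutImage(dpy, surface, image->image_id,
            0, 0, image->width, image->height,
            0, 0, image->width, image->height);
    }
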
@@ -3396,6 +3469,16 @@ static void VaapiAdvanceFrame(void)
 		!= VA_STATUS_SUCCESS) {
 		Error(_("video/vaapi: vaSyncSurface failed\n"));
 	    }
+	    // debug duplicate frames
+	} else if (filled == 1) {
+	    decoder->FramesDuped++;
+	    Warning(_
+		("video: display buffer empty, duping frame (%d/%d) %d\n"),
+		decoder->FramesDuped, decoder->FrameCounter,
+		atomic_read(&VideoPacketsFilled));
+	    if (!(decoder->FramesDisplayed % 300)) {
+		VaapiPrintFrames(decoder);
+	    }
 	}
     }
 }

@@ -3510,16 +3593,6 @@ static void VaapiSyncDisplayFrame(VaapiDecoder * decoder)
 	    || decoder->FramesDisplayed % 6)) {
 	VaapiAdvanceFrame();
     }
-    // debug duplicate frames
-    filled = atomic_read(&decoder->SurfacesFilled);
-    if (filled == 1) {
-	decoder->FramesDuped++;
-	Warning(_("video: display buffer empty, duping frame (%d/%d)\n"),
-	    decoder->FramesDuped, decoder->FrameCounter);
-	if (!(decoder->FramesDisplayed % 300)) {
-	    VaapiPrintFrames(decoder);
-	}
-    }
 
     VaapiDisplayFrame();

@@ -3528,20 +3601,24 @@ static void VaapiSyncDisplayFrame(VaapiDecoder * decoder)
     //
     audio_clock = AudioGetClock();
     video_clock = VideoGetClock();
+    filled = atomic_read(&decoder->SurfacesFilled);
 
     // FIXME: audio not known assume 333ms delay
 
     if (decoder->DupNextFrame) {
-	decoder->DupNextFrame = 0;
+	decoder->DupNextFrame--;
     } else if ((uint64_t) audio_clock != AV_NOPTS_VALUE
 	&& (uint64_t) video_clock != AV_NOPTS_VALUE) {
 	// both clocks are known
 	if (abs(video_clock - audio_clock) > 5000 * 90) {
 	    Debug(3, "video: pts difference too big\n");
+	} else if (video_clock > audio_clock + VideoAudioDelay + 80 * 90) {
+	    Debug(3, "video: slow down video\n");
+	    decoder->DupNextFrame += 2;
 	} else if (video_clock > audio_clock + VideoAudioDelay + 40 * 90) {
 	    Debug(3, "video: slow down video\n");
-	    decoder->DupNextFrame = 1;
-	} else if (audio_clock + VideoAudioDelay > video_clock + 50 * 90
+	    decoder->DupNextFrame++;
+	} else if (audio_clock + VideoAudioDelay > video_clock + 40 * 90
 	    && filled > 1) {
 	    Debug(3, "video: speed up video\n");
 	    decoder->DropNextFrame = 1;
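
All of these comparisons are in 90 kHz PTS ticks, i.e. 90 ticks per millisecond: a difference beyond 5000 * 90 (5 s) is treated as a clock jump and ignored, video more than 80 ms ahead of audio now duplicates two frames, more than 40 ms ahead duplicates one, and audio more than 40 ms ahead (with at least two surfaces queued) drops a frame. Turning DupNextFrame/DropNextFrame from flags into counters is what the commit message calls faster video and audio sync.
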
@@ -3554,10 +3631,11 @@ static void VaapiSyncDisplayFrame(VaapiDecoder * decoder)
 	static int64_t last_video_clock;
 
 	Debug(3,
-	    "video: %09" PRIx64 "-%09" PRIx64 " %4" PRId64 " pts %+dms %"
-	    PRId64 "\n", audio_clock, video_clock,
-	    video_clock - last_video_clock, audio_clock - video_clock,
-	    (int)(audio_clock - video_clock) / 90, AudioGetDelay() / 90);
+	    "video: %6" PRId64 " %6" PRId64 " pts %+4d %4" PRId64 " %+4" PRId64
+	    " ms %3d bufs\n", video_clock - last_video_clock,
+	    (int)(audio_clock - video_clock) / 90,
+	    AudioGetDelay() / 90, VideoDeltaPTS / 90,
+	    atomic_read(&VideoPacketsFilled));
 
 	last_video_clock = video_clock;
     }

@@ -3585,7 +3663,7 @@ static void VaapiSyncRenderFrame(VaapiDecoder * decoder,
 	if (!(decoder->FramesDisplayed % 300)) {
 	    VaapiPrintFrames(decoder);
 	}
-	decoder->DropNextFrame = 0;
+	decoder->DropNextFrame--;
 	return;
     }
     // if video output buffer is full, wait and display surface.

@@ -3593,6 +3671,8 @@ static void VaapiSyncRenderFrame(VaapiDecoder * decoder,
     while (atomic_read(&decoder->SurfacesFilled) >= VIDEO_SURFACES_MAX - 1) {
 	struct timespec abstime;
 
+	pthread_mutex_unlock(&VideoLockMutex);
+
 	abstime = decoder->FrameTime;
 	abstime.tv_nsec += 14 * 1000 * 1000;
 	if (abstime.tv_nsec >= 1000 * 1000 * 1000) {

@@ -3605,6 +3685,7 @@ static void VaapiSyncRenderFrame(VaapiDecoder * decoder,
 	pthread_setcancelstate(PTHREAD_CANCEL_ENABLE, NULL);
 	pthread_testcancel();
+	pthread_mutex_lock(&VideoLockMutex);
 	// give osd some time slot
 	while (pthread_cond_timedwait(&VideoWakeupCond, &VideoLockMutex,
 		&abstime) != ETIMEDOUT) {
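
This unlock/relock pair appears to be the "must release lock for VideoPollEvent" fix from the commit message: while the render thread only waits for a queued surface to drain, VideoLockMutex is dropped so the event polling and OSD paths are not blocked, and it is re-acquired solely because pthread_cond_timedwait() must be entered with the mutex held (the wait itself releases it atomically). A condensed sketch of the pattern, with an assumed wrapper name (the real loop recomputes abstime from the last frame time):

    #include <pthread.h>
    #include <errno.h>

    static pthread_mutex_t VideoLockMutex = PTHREAD_MUTEX_INITIALIZER;
    static pthread_cond_t VideoWakeupCond = PTHREAD_COND_INITIALIZER;

    // sketch only: called with VideoLockMutex held, returns with it held
    static void wait_for_free_surface(const struct timespec *abstime)
    {
        pthread_mutex_unlock(&VideoLockMutex);  // let other threads run

        pthread_setcancelstate(PTHREAD_CANCEL_ENABLE, NULL);
        pthread_testcancel();                   // cancellation point, lock not held

        pthread_mutex_lock(&VideoLockMutex);    // timedwait needs the mutex held
        while (pthread_cond_timedwait(&VideoWakeupCond, &VideoLockMutex,
                abstime) != ETIMEDOUT) {
            // woken early: give the OSD its time slot, then keep waiting
        }
    }
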
@@ -3622,51 +3703,6 @@ static void VaapiSyncRenderFrame(VaapiDecoder * decoder,
 #endif
 }
 
-#if 0
-
-///
-///	Update video pts.
-///
-///	@param decoder	VA-API decoder
-///	@param frame	frame to display
-///
-static void VaapiSetPts(VaapiDecoder * decoder, const AVFrame * frame)
-{
-    int64_t pts;
-
-    // update video clock
-    if ((uint64_t) decoder->PTS != AV_NOPTS_VALUE) {
-	decoder->PTS += decoder->Interlaced ? 40 * 90 : 20 * 90;
-    }
-    //pts = frame->best_effort_timestamp;
-    pts = frame->pkt_pts;
-    if ((uint64_t) pts == AV_NOPTS_VALUE || !pts) {
-	// libav: 0.8pre didn't set pts
-	pts = frame->pkt_dts;
-    }
-    if (!pts) {
-	pts = AV_NOPTS_VALUE;
-    }
-    // build a monotonic pts
-    if ((uint64_t) decoder->PTS != AV_NOPTS_VALUE) {
-	if (pts - decoder->PTS < -10 * 90) {
-	    pts = AV_NOPTS_VALUE;
-	}
-    }
-    // libav: sets only pkt_dts which can be 0
-    if ((uint64_t) pts != AV_NOPTS_VALUE) {
-	if (decoder->PTS != pts) {
-	    Debug(3,
-		"video: %#012" PRIx64 "->%#012" PRIx64 " %4" PRId64 " pts\n",
-		decoder->PTS, pts, pts - decoder->PTS);
-	    decoder->PTS = pts;
-	}
-    }
-}
-
-#endif
-
 ///
 ///	Get VA-API decoder video clock.
 ///

@@ -3702,8 +3738,9 @@ static void VaapiDisplayHandlerThread(void)
     struct timespec nowtime;
     VaapiDecoder *decoder;
 
-    decoder = VaapiDecoders[0];
+    if (!(decoder = VaapiDecoders[0])) {	// no stream available
+	return;
+    }
     //
     //	fill frame output ring buffer
     //
@@ -4643,6 +4680,7 @@ static void VdpauCleanup(VdpauDecoder * decoder)
     decoder->SurfaceField = 0;
 
     decoder->PTS = AV_NOPTS_VALUE;
+    VideoDeltaPTS = 0;
 }
 
 ///

@@ -5203,9 +5241,13 @@ static void VdpauInit(const char *display_name)
 ///
 static void VdpauExit(void)
 {
-    if (VdpauDecoders[0]) {
-	VdpauDelDecoder(VdpauDecoders[0]);
-	VdpauDecoders[0] = NULL;
+    int i;
+
+    for (i = 0; i < VdpauDecoderN; ++i) {
+	if (VdpauDecoders[i]) {
+	    VdpauDelDecoder(VdpauDecoders[i]);
+	    VdpauDecoders[i] = NULL;
+	}
     }
 
     if (VdpauDevice) {

@@ -6382,8 +6424,9 @@
 	    // keep use of last surface
 	    ++decoder->FramesDuped;
 	    Warning(_
-		("video: display buffer empty, duping frame (%d/%d)\n"),
-		decoder->FramesDuped, decoder->FrameCounter);
+		("video: display buffer empty, duping frame (%d/%d) %d\n"),
+		decoder->FramesDuped, decoder->FrameCounter,
+		atomic_read(&VideoPacketsFilled));
 	    if (!(decoder->FramesDisplayed % 300)) {
 		VdpauPrintFrames(decoder);
 	    }

@@ -6504,7 +6547,6 @@ static void VdpauSyncDisplayFrame(VdpauDecoder * decoder)
 	    || decoder->FramesDisplayed % 6)) {
 	VdpauAdvanceFrame();
     }
-    filled = atomic_read(&decoder->SurfacesFilled);
 
     VdpauDisplayFrame();

@@ -6513,21 +6555,25 @@
     //
     audio_clock = AudioGetClock();
     video_clock = VideoGetClock();
+    filled = atomic_read(&decoder->SurfacesFilled);
 
     // FIXME: audio not known assume 333ms delay
 
     if (decoder->DupNextFrame) {
-	decoder->DupNextFrame = 0;
+	decoder->DupNextFrame--;
     } else if ((uint64_t) audio_clock != AV_NOPTS_VALUE
 	&& (uint64_t) video_clock != AV_NOPTS_VALUE) {
 	// both clocks are known
 	if (abs(video_clock - audio_clock) > 5000 * 90) {
 	    Debug(3, "video: pts difference too big\n");
-	} else if (video_clock > audio_clock + VideoAudioDelay + 30 * 90) {
+	} else if (video_clock > audio_clock + VideoAudioDelay + 80 * 90) {
 	    Debug(3, "video: slow down video\n");
-	    decoder->DupNextFrame = 1;
-	} else if (audio_clock + VideoAudioDelay > video_clock + 50 * 90
-	    && filled > 1) {
+	    decoder->DupNextFrame += 2;
+	} else if (video_clock > audio_clock + VideoAudioDelay + 40 * 90) {
+	    Debug(3, "video: slow down video\n");
+	    decoder->DupNextFrame++;
+	} else if (audio_clock + VideoAudioDelay > video_clock + 40 * 90
+	    && filled > 1 + 2 * decoder->Interlaced) {
 	    Debug(3, "video: speed up video\n");
 	    decoder->DropNextFrame = 1;
 	}

@@ -6539,10 +6585,11 @@ static void VdpauSyncDisplayFrame(VdpauDecoder * decoder)
 	static int64_t last_video_clock;
 
 	Debug(3,
-	    "video: %09" PRIx64 "-%09" PRIx64 " %4" PRId64 " pts %+dms %"
-	    PRId64 "\n", audio_clock, video_clock,
-	    video_clock - last_video_clock, audio_clock - video_clock,
-	    (int)(audio_clock - video_clock) / 90, AudioGetDelay() / 90);
+	    "video: %6" PRId64 " %6" PRId64 " pts %+4d %4" PRId64 " %+4" PRId64
+	    " ms %3d bufs\n", video_clock - last_video_clock,
+	    (int)(audio_clock - video_clock) / 90,
+	    AudioGetDelay() / 90, VideoDeltaPTS / 90,
+	    atomic_read(&VideoPacketsFilled));
 
 	last_video_clock = video_clock;
     }

@@ -6575,7 +6622,7 @@ static void VdpauSyncRenderFrame(VdpauDecoder * decoder,
 	if (!(decoder->FramesDisplayed % 300)) {
 	    VdpauPrintFrames(decoder);
 	}
-	decoder->DropNextFrame = 0;
+	decoder->DropNextFrame--;
 	return;
     }
     // if video output buffer is full, wait and display surface.

@@ -6583,6 +6630,8 @@ static void VdpauSyncRenderFrame(VdpauDecoder * decoder,
     while (atomic_read(&decoder->SurfacesFilled) >= VIDEO_SURFACES_MAX) {
 	struct timespec abstime;
 
+	pthread_mutex_unlock(&VideoLockMutex);
+
 	abstime = decoder->FrameTime;
 	abstime.tv_nsec += 14 * 1000 * 1000;
 	if (abstime.tv_nsec >= 1000 * 1000 * 1000) {

@@ -6596,6 +6645,7 @@ static void VdpauSyncRenderFrame(VdpauDecoder * decoder,
 	// fix dead-lock with VdpauExit
 	pthread_setcancelstate(PTHREAD_CANCEL_ENABLE, NULL);
 	pthread_testcancel();
+	pthread_mutex_lock(&VideoLockMutex);
 	// give osd some time slot
 	while (pthread_cond_timedwait(&VideoWakeupCond, &VideoLockMutex,
 		&abstime) != ETIMEDOUT) {

@@ -6630,7 +6680,7 @@ static int64_t VdpauGetClock(const VdpauDecoder * decoder)
 	    20 * 90 * (2 * atomic_read(&decoder->SurfacesFilled)
 	    - decoder->SurfaceField);
     }
-    return decoder->PTS - 20 * 90 * (atomic_read(&decoder->SurfacesFilled) -
-	1);
+    return decoder->PTS - 20 * 90 * (atomic_read(&decoder->SurfacesFilled) +
+	1);
 }
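
The corrected estimate counts every surface still queued as one undisplayed 20 ms frame (20 * 90 PTS ticks): with, say, 3 surfaces filled the current display time is taken to be (3 + 1) * 20 ms = 80 ms behind decoder->PTS, 40 ms further back than the old "filled - 1" formula (example value assumed; the interlaced branch above counts two fields per surface instead).
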
@@ -7615,6 +7665,10 @@ unsigned VideoGetSurface(VideoHwDecoder * decoder)
 ///
 void VideoReleaseSurface(VideoHwDecoder * decoder, unsigned surface)
 {
+    // FIXME: must be guarded against calls, after VideoExit
+    if (!XlibDisplay) {			// no init or failed
+	return;
+    }
 #ifdef USE_VAAPI
     if (VideoVaapiEnabled) {
 	VaapiReleaseSurface(&decoder->Vaapi, surface);

@@ -7668,7 +7722,6 @@ enum PixelFormat Video_get_format(VideoHwDecoder * decoder,
 static void VideoSetPts(int64_t * pts_p, int interlaced, const AVFrame * frame)
 {
     int64_t pts;
-    int64_t delta;
 
     // update video clock
     if ((uint64_t) * pts_p != AV_NOPTS_VALUE) {

@@ -7680,23 +7733,28 @@ static void VideoSetPts(int64_t * pts_p, int interlaced, const AVFrame * frame)
 	// libav: 0.8pre didn't set pts
 	pts = frame->pkt_dts;
     }
-    if (!pts) {
-	pts = AV_NOPTS_VALUE;
-    }
     // libav: sets only pkt_dts which can be 0
-    if ((uint64_t) pts != AV_NOPTS_VALUE) {
+    if (pts && (uint64_t) pts != AV_NOPTS_VALUE) {
 	// build a monotonic pts
 	if ((uint64_t) * pts_p != AV_NOPTS_VALUE) {
+	    int64_t delta;
+
 	    delta = pts - *pts_p;
 	    // ignore negative jumps
-	    if (delta > -300 * 90 && delta < -15 * 90) {
+	    if (delta > -600 * 90 && delta <= -40 * 90) {
+		if (-delta > VideoDeltaPTS) {
+		    VideoDeltaPTS = -delta;
+		    Debug(4,
+			"video: %#012" PRIx64 "->%#012" PRIx64 " delta+%4"
+			PRId64 " pts\n", *pts_p, pts, pts - *pts_p);
+		}
 		return;
 	    }
 	}
 	if (*pts_p != pts) {
 	    Debug(4,
-		"video: %#012" PRIx64 "->%#012" PRIx64 " %4" PRId64 " pts\n",
-		*pts_p, pts, pts - *pts_p);
+		"video: %#012" PRIx64 "->%#012" PRIx64 " delta=%4" PRId64
+		" pts\n", *pts_p, pts, pts - *pts_p);
 	    *pts_p = pts;
 	}
     }
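
The ignore window for backward PTS steps is now 40-600 ms instead of 15-300 ms, and such steps no longer reset the video clock; the largest ignored step is remembered in VideoDeltaPTS, so a recurring -200 ms jump, for example, shows up as 200 in the new "ms ... bufs" debug line (200 * 90 = 18000 ticks). Jumps outside that window still update *pts_p as before.
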
@@ -7972,6 +8030,7 @@ uint8_t *VideoGrab(int *size, int *width, int *height, int write_header)
     (void)size;
     (void)width;
     (void)height;
+    (void)write_header;
 
     return NULL;
 }

@@ -8316,6 +8375,9 @@ void VideoSetAutoCrop(int interval, int delay)
 	VaapiResetAutoCrop();
     }
 #endif
+#else
+    (void)interval;
+    (void)delay;
 #endif
 }
@@ -8501,6 +8563,30 @@ void VideoExit(void)
 #include <getopt.h>
 
 int SysLogLevel;			///< show additional debug informations
+uint32_t VideoSwitch;			///< required
+
+uint64_t AudioGetDelay(void)		///< required
+{
+    return 0UL;
+}
+
+int64_t AudioGetClock(void)		///< required
+{
+    return AV_NOPTS_VALUE;
+}
+
+void FeedKeyPress( __attribute__ ((unused))
+    const char *x, __attribute__ ((unused))
+    const char *y, __attribute__ ((unused))
+    int a, __attribute__ ((unused))
+    int b)
+{
+}
+
+int VideoDecode(void)
+{
+    return -1;
+}
 
 ///
 ///	Print version.
@@ -8536,16 +8622,29 @@ static void PrintUsage(void)
 ///
 int main(int argc, char *const argv[])
 {
+    uint32_t start_tick;
+    uint32_t tick;
+    int n;
+    VideoHwDecoder *video_hw_decoder;
+
     SysLogLevel = 0;
 
     //
     //	Parse command line arguments
     //
     for (;;) {
-	switch (getopt(argc, argv, "hv?-c:d")) {
+	switch (getopt(argc, argv, "hv?-c:dg:")) {
 	    case 'd':			// enabled debug
 		++SysLogLevel;
 		continue;
+	    case 'g':			// geometry
+		if (VideoSetGeometry(optarg) < 0) {
+		    fprintf(stderr,
+			_
+			("Bad formated geometry please use: [=][<width>{xX}<height>][{+-}<xoffset>{+-}<yoffset>]\n"));
+		    return 0;
+		}
+		continue;
 
 	    case EOF:
 		break;

@@ -8585,25 +8684,36 @@ int main(int argc, char *const argv[])
     //
     VideoInit(NULL);
     VideoOsdInit();
+    video_hw_decoder = VideoNewHwDecoder();
+
+    start_tick = GetMsTicks();
+    n = 0;
     for (;;) {
+#if 0
 	VideoRenderOverlay();
 	VideoDisplayOverlay();
 	glXSwapBuffers(XlibDisplay, VideoWindow);
 	GlxCheck();
 	glClear(GL_COLOR_BUFFER_BIT);
-	XFlush(XlibDisplay);
 	XSync(XlibDisplay, False);
 	XFlush(XlibDisplay);
-	XSync(XlibDisplay, False);
-	XFlush(XlibDisplay);
-	XSync(XlibDisplay, False);
-	XFlush(XlibDisplay);
-	XSync(XlibDisplay, False);
-	XFlush(XlibDisplay);
-	XSync(XlibDisplay, False);
-	XFlush(XlibDisplay);
-	usleep(20 * 1000);
+#endif
+#ifdef USE_VAAPI
+	if (VideoVaapiEnabled) {
+	    VaapiDisplayFrame();
+	}
+#endif
+#ifdef USE_VDPAU
+	if (VideoVdpauEnabled) {
+	    VdpauDisplayFrame();
+	}
+#endif
+	tick = GetMsTicks();
+	n++;
+	if (!(n % 100)) {
+	    printf("%d ms / frame\n", (tick - start_tick) / n);
+	}
+	usleep(2 * 1000);
     }
 
     VideoExit();
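
With the Makefile rule and the reworked main() above, the standalone renderer can be exercised roughly as "make video_test" followed by "./video_test -d -g 1280x720+0+0" (the geometry string is only an example; -g takes the X11 [=][<width>x<height>][{+-}<xoffset>{+-}<yoffset>] form from the error message above, and -d raises the debug level). Every 100 frames the loop prints the average milliseconds per displayed frame.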