
Adapted cFrameDetector::Analyze() to HD NTSC broadcasts that split frames over several payload units

Klaus Schmidinger 2009-03-27 13:38:59 +01:00
parent e7ea3b3c70
commit 323969e58d
5 changed files with 113 additions and 53 deletions
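
For orientation, a rough sketch of the timing arithmetic behind this change; the numbers are taken from the constants used in the diff below, not from any additional source. PTS values run at 90000 ticks per second, so a 25 fps PAL frame lasts 90000 / 25 = 3600 ticks and a 29.97 fps NTSC frame lasts 90000 * 1001 / 30000 = 3003 ticks. When a broadcaster splits each frame over two payload units, the PTS values of consecutive payload units differ by only about 3003 / 2 ≈ 1501 ticks, which is why the new code treats a smallest PTS delta of 1501 as NTSC with framesPerPayloadUnit = -2.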

CONTRIBUTORS

@@ -2421,6 +2421,8 @@ Timo Helkio <timolavi@mbnet.fi>
 Derek Kelly (user.vdr@gmail.com)
   for fixing handling the 'new' indicator in the recordings menu for TS recordings
+  for reporting a problem with HD NTSC broadcasts that split frames over several payload
+  units
 
 Marcel Unbehaun <frostworks@gmx.de>
   for adding cRecordingInfo::GetEvent()

HISTORY

@@ -5979,7 +5979,7 @@ Video Disk Recorder Revision History
   cDevice class reimplements PlayTs() or PlayPes(), it also needs to make sure this
   new function works as expected.
 
-2009-03-13: Version 1.7.5
+2009-03-27: Version 1.7.5
 
 - Fixed a hangup when replaying a TS recording with subtitles activated (reported
   by Timo Helkio).
@@ -6000,3 +6000,6 @@ Video Disk Recorder Revision History
 + The full-featured DVB cards need an improved firmware in order to return
   proper STC values in trick modes (thanks to Oliver Endriss for enhancing the
   av7110 firmware).
+- Adapted cFrameDetector::Analyze() to HD NTSC broadcasts that split frames over
+  several payload units (thanks to Derek Kelly for reporting this and helping in
+  testing).

recorder.c

@@ -4,7 +4,7 @@
  * See the main source file 'vdr.c' for copyright information and
  * how to reach the author.
  *
- * $Id: recorder.c 2.2 2009/01/23 16:44:29 kls Exp $
+ * $Id: recorder.c 2.3 2009/03/20 15:49:02 kls Exp $
  */
 
 #include "recorder.h"
@@ -113,7 +113,6 @@ void cRecorder::Receive(uchar *Data, int Length)
 void cRecorder::Action(void)
 {
   time_t t = time(NULL);
-  bool Synced = false;
   bool InfoWritten = false;
   while (Running()) {
         int r;
@@ -123,7 +122,7 @@ void cRecorder::Action(void)
         if (Count) {
            if (!Running() && frameDetector->IndependentFrame()) // finish the recording before the next independent frame
               break;
-           if (Synced |= frameDetector->IndependentFrame()) { // start with first independent frame
+           if (frameDetector->Synced()) {
               if (!InfoWritten) {
                  if (recordingInfo.Read()) {
                     if (frameDetector->FramesPerSecond() > 0 && recordingInfo.FramesPerSecond() != frameDetector->FramesPerSecond()) {

remux.c

@@ -4,7 +4,7 @@
  * See the main source file 'vdr.c' for copyright information and
  * how to reach the author.
  *
- * $Id: remux.c 2.14 2009/03/08 12:12:17 kls Exp $
+ * $Id: remux.c 2.15 2009/03/27 13:32:11 kls Exp $
  */
 
 #include "remux.h"
@@ -676,15 +676,25 @@ cFrameDetector::cFrameDetector(int Pid, int Type)
 {
   pid = Pid;
   type = Type;
+  synced = false;
   newFrame = independentFrame = false;
-  lastPts = 0;
+  numPtsValues = 0;
+  numIFrames = 0;
   isVideo = type == 0x02 || type == 0x1B; // MPEG 2 or MPEG 4
   frameDuration = 0;
-  framesPerPayloadUnit = 0;
+  framesInPayloadUnit = framesPerPayloadUnit = 0;
+  payloadUnitOfFrame = 0;
   scanning = false;
   scanner = 0;
 }
 
+static int CmpUint32(const void *p1, const void *p2)
+{
+  if (*(uint32_t *)p1 < *(uint32_t *)p2) return -1;
+  if (*(uint32_t *)p1 > *(uint32_t *)p2) return 1;
+  return 0;
+}
+
 int cFrameDetector::Analyze(const uchar *Data, int Length)
 {
   newFrame = independentFrame = false;
@@ -692,56 +702,79 @@ int cFrameDetector::Analyze(const uchar *Data, int Length)
   if (TsHasPayload(Data) && !TsIsScrambled(Data) && TsPid(Data) == pid) {
      if (TsPayloadStart(Data)) {
         if (!frameDuration) {
-           const uchar *Pes = Data + TsPayloadOffset(Data);
-           if (PesHasPts(Pes)) {
-              int64_t Pts = PesGetPts(Pes);
-              if (Pts < lastPts) { // avoid wrapping
-                 lastPts = 0;
-                 framesPerPayloadUnit = 0;
-                 }
-              if ((!lastPts || !framesPerPayloadUnit) && Pts != lastPts)
-                 lastPts = Pts;
-              else {
-                 int64_t Delta = Pts - lastPts;
-                 if (isVideo) {
-                    if (Delta % 3600 == 0)
-                       frameDuration = 3600; // PAL, 25 fps
-                    else if (Delta % 3003 == 0)
-                       frameDuration = 3003; // NTSC, 29.97 fps
-                    else {
-                       frameDuration = 3600; // unknown, assuming 25 fps
-                       dsyslog("unknown frame duration, assuming 25 fps (PTS: %lld - %lld = %lld FPPU = %d)\n", Pts, lastPts, Delta, framesPerPayloadUnit);
-                       }
-                    }
-                 else // audio
-                    frameDuration = Delta; // PTS of audio frames is always increasing
-                 dbgframes("PTS: %lld - %lld = %lld -> FD = %d FPS = %5.2f FPPU = %d\n", Pts, lastPts, Delta, frameDuration, 90000.0 / frameDuration, framesPerPayloadUnit);
-                 }
-              }
+           // frame duration unknown, so collect a sequence of PTS values:
+           if (numPtsValues < MaxPtsValues && numIFrames < 2) { // collect a sequence containing at least two I-frames
+              const uchar *Pes = Data + TsPayloadOffset(Data);
+              if (PesHasPts(Pes)) {
+                 ptsValues[numPtsValues] = PesGetPts(Pes);
+                 // check for rollover:
+                 if (numPtsValues && ptsValues[numPtsValues - 1] > 0xF0000000 && ptsValues[numPtsValues] < 0x10000000) {
+                    dbgframes("#");
+                    numPtsValues = 0;
+                    numIFrames = 0;
+                    }
+                 else
+                    numPtsValues++;
+                 }
+              }
+           else {
+              // find the smallest PTS delta:
+              qsort(ptsValues, numPtsValues, sizeof(uint32_t), CmpUint32);
+              numPtsValues--;
+              for (int i = 0; i < numPtsValues; i++)
+                  ptsValues[i] = ptsValues[i + 1] - ptsValues[i];
+              qsort(ptsValues, numPtsValues, sizeof(uint32_t), CmpUint32);
+              uint32_t Delta = ptsValues[0];
+              // determine frame info:
+              if (isVideo) {
+                 if (Delta % 3600 == 0)
+                    frameDuration = 3600; // PAL, 25 fps
+                 else if (Delta % 3003 == 0)
+                    frameDuration = 3003; // NTSC, 29.97 fps
+                 else if (Delta == 1501) {
+                    frameDuration = 3003; // NTSC, 29.97 fps
+                    framesPerPayloadUnit = -2;
+                    }
+                 else {
+                    frameDuration = 3600; // unknown, assuming 25 fps
+                    dsyslog("unknown frame duration (%d), assuming 25 fps", Delta);
+                    }
+                 }
+              else // audio
+                 frameDuration = Delta; // PTS of audio frames is always increasing
+              dbgframes("\nframe duration = %d FPS = %5.2f FPPU = %d\n", frameDuration, 90000.0 / frameDuration, framesPerPayloadUnit);
+              }
            }
        scanner = 0;
        scanning = true;
        }
     if (scanning) {
        int PayloadOffset = TsPayloadOffset(Data);
-       if (TsPayloadStart(Data))
+       if (TsPayloadStart(Data)) {
           PayloadOffset += PesPayloadOffset(Data + PayloadOffset);
+          if (!framesPerPayloadUnit)
+             framesPerPayloadUnit = framesInPayloadUnit;
+          if (DebugFrames && !synced)
+             dbgframes("/");
+          }
        for (int i = PayloadOffset; i < TS_SIZE; i++) {
            scanner <<= 8;
            scanner |= Data[i];
            switch (type) {
              case 0x02: // MPEG 2 video
                   if (scanner == 0x00000100) { // Picture Start Code
-                     if (frameDuration) {
-                        newFrame = true;
-                        independentFrame = ((Data[i + 2] >> 3) & 0x07) == 1; // I-Frame
-                        if (framesPerPayloadUnit == 1) {
+                     newFrame = true;
+                     independentFrame = ((Data[i + 2] >> 3) & 0x07) == 1; // I-Frame
+                     if (synced) {
+                        if (framesPerPayloadUnit <= 1) {
                            scanning = false;
                            return TS_SIZE;
                            }
                        }
                     else {
-                        framesPerPayloadUnit++;
+                        framesInPayloadUnit++;
+                        if (independentFrame)
+                           numIFrames++;
                         dbgframes("%d ", (Data[i + 2] >> 3) & 0x07);
                         }
                     scanner = 0;
@@ -749,16 +782,25 @@ int cFrameDetector::Analyze(const uchar *Data, int Length)
                   break;
              case 0x1B: // MPEG 4 video
                   if (scanner == 0x00000109) { // Access Unit Delimiter
-                     if (frameDuration) {
-                        newFrame = true;
-                        independentFrame = Data[i + 1] == 0x10;
-                        if (framesPerPayloadUnit == 1) {
+                     newFrame = true;
+                     independentFrame = Data[i + 1] == 0x10;
+                     if (synced) {
+                        if (framesPerPayloadUnit < 0) {
+                           payloadUnitOfFrame = (payloadUnitOfFrame + 1) % -framesPerPayloadUnit;
+                           if (payloadUnitOfFrame != 0 && independentFrame)
+                              payloadUnitOfFrame = 0;
+                           if (payloadUnitOfFrame)
+                              newFrame = false;
+                           }
+                        if (framesPerPayloadUnit <= 1) {
                            scanning = false;
                            return TS_SIZE;
                            }
                        }
                     else {
-                        framesPerPayloadUnit++;
+                        framesInPayloadUnit++;
+                        if (independentFrame)
+                           numIFrames++;
                         dbgframes("%02X ", Data[i + 1]);
                         }
                     scanner = 0;
@@ -766,18 +808,23 @@ int cFrameDetector::Analyze(const uchar *Data, int Length)
                   break;
              case 0x04: // MPEG audio
              case 0x06: // AC3 audio
-                  if (frameDuration) {
-                     newFrame = true;
-                     independentFrame = true;
+                  newFrame = true;
+                  independentFrame = true;
+                  if (synced)
                      scanning = false;
+                  else {
+                     framesInPayloadUnit = 1;
+                     numIFrames++;
                      }
-                  else
-                     framesPerPayloadUnit = 1;
                   break;
              default: esyslog("ERROR: unknown stream type %d (PID %d) in frame detector", type, pid);
                       pid = 0; // let's just ignore any further data
              }
            }
+       if (!synced && frameDuration && independentFrame) {
+          synced = true;
+          dbgframes("*");
+          }
        }
     }
  return TS_SIZE;
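
To make the new detection logic easier to follow, here is a minimal, self-contained sketch of the same "smallest PTS delta" technique; it is not part of the commit, and the sample PTS values are invented for illustration (two payload units per frame, listed out of presentation order, as happens with B-frames):

#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>

// Same comparison helper as the one added to remux.c:
static int CmpUint32(const void *p1, const void *p2)
{
  if (*(const uint32_t *)p1 < *(const uint32_t *)p2) return -1;
  if (*(const uint32_t *)p1 > *(const uint32_t *)p2) return 1;
  return 0;
}

int main(void)
{
  // Invented PTS values of six consecutive payload units of an NTSC stream
  // in which every frame is split over two payload units:
  uint32_t ptsValues[] = { 9009, 3003, 4504, 6006, 7507, 10510 };
  int numPtsValues = sizeof(ptsValues) / sizeof(ptsValues[0]);
  // Find the smallest PTS delta, as cFrameDetector::Analyze() now does:
  qsort(ptsValues, numPtsValues, sizeof(uint32_t), CmpUint32);
  numPtsValues--;
  for (int i = 0; i < numPtsValues; i++)
      ptsValues[i] = ptsValues[i + 1] - ptsValues[i];
  qsort(ptsValues, numPtsValues, sizeof(uint32_t), CmpUint32);
  printf("smallest PTS delta = %u\n", (unsigned)ptsValues[0]); // prints 1501 -> NTSC, 2 payload units per frame
  return 0;
}

With the values sorted, the smallest difference between neighbours is the spacing of consecutive payload units, regardless of the order in which they were transmitted.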

remux.h

@@ -4,7 +4,7 @@
  * See the main source file 'vdr.c' for copyright information and
  * how to reach the author.
  *
- * $Id: remux.h 2.8 2009/03/08 12:05:12 kls Exp $
+ * $Id: remux.h 2.9 2009/03/27 13:38:59 kls Exp $
  */
 
 #ifndef __REMUX_H
@@ -267,14 +267,22 @@ void PesDump(const char *Name, const u_char *Data, int Length);
 class cFrameDetector {
 private:
+  enum { MaxPtsValues = 150 };
   int pid;
   int type;
+  bool synced;
   bool newFrame;
   bool independentFrame;
-  int64_t lastPts;
+  uint32_t ptsValues[MaxPtsValues]; // 32 bit is enough - we only need the delta
+  int numPtsValues;
+  int numIFrames;
   bool isVideo;
   int frameDuration;
-  int framesPerPayloadUnit;
+  int framesInPayloadUnit;
+  int framesPerPayloadUnit; // Some broadcasters send one frame per payload unit (== 1),
+                            // some put an entire GOP into one payload unit (> 1), and
+                            // some spread a single frame over several payload units (< 0).
+  int payloadUnitOfFrame;
   bool scanning;
   uint32_t scanner;
 public:
@@ -282,10 +290,11 @@ public:
   int Analyze(const uchar *Data, int Length);
        ///< Analyzes the TS packets pointed to by Data. Length is the number of
        ///< bytes Data points to, and must be a multiple of 188.
-       ///< Returns the number of bytes that have been analyzed and may be written
-       ///< to the recording file. If the return value is 0, the data was not
-       ///< sufficient for analyzing and Analyze() needs to be called again with
-       ///< more actual data.
+       ///< Returns the number of bytes that have been analyzed.
+       ///< If the return value is 0, the data was not sufficient for analyzing and
+       ///< Analyze() needs to be called again with more actual data.
+  bool Synced(void) { return synced; }
+       ///< Returns true if the frame detector has synced on the data stream.
   bool NewFrame(void) { return newFrame; }
        ///< Returns true if the data given to the last call to Analyze() started a
        ///< new frame.
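
Finally, a condensed sketch of how a caller can drive the detector through the new Synced() interface, modelled on the cRecorder::Action() change above. GetNextTsPacket() and the surrounding RecordFrom() function are hypothetical placeholders for the caller's own TS source; only the cFrameDetector calls are taken from the diff:

#include "remux.h"

uchar *GetNextTsPacket(void); // hypothetical: returns one 188 byte TS packet, or NULL at end of data

void RecordFrom(int VideoPid) // hypothetical caller, condensed from cRecorder::Action()
{
  cFrameDetector FrameDetector(VideoPid, 0x1B); // 0x1B = MPEG 4 video, as in the diff
  uchar *Data;
  while ((Data = GetNextTsPacket()) != NULL) {
        int Count = FrameDetector.Analyze(Data, TS_SIZE);
        if (Count <= 0)
           continue; // not enough data for analysis yet - feed more packets
        if (!FrameDetector.Synced())
           continue; // frame duration not determined yet, so nothing is written
        if (FrameDetector.NewFrame() && FrameDetector.IndependentFrame()) {
           // a safe point to start (or cut) the recording
           }
        // ...write Count bytes of Data to the recording file...
        }
}

Compared to the old recorder.c logic, which simply waited for the first independent frame, Synced() additionally guarantees that the frame duration has been measured, so the recording's frames-per-second value is known before anything is written.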