Modified cFrameDetector::Analyze() to minimize file I/O overhead during recording

Author: Klaus Schmidinger
Date:   2009-03-27 14:11:43 +01:00
Parent: 323969e58d
Commit: e3de25dd11
3 changed files with 139 additions and 128 deletions

CONTRIBUTORS

@@ -2426,3 +2426,6 @@ Derek Kelly (user.vdr@gmail.com)
 Marcel Unbehaun <frostworks@gmx.de>
   for adding cRecordingInfo::GetEvent()
+
+Günter Niedermeier <linuxtv@ncs-online.de>
+  for reporting a problem with file I/O overhead during recording in TS format

HISTORY

@@ -6003,3 +6003,6 @@ Video Disk Recorder Revision History
 - Adapted cFrameDetector::Analyze() to HD NTSC broadcasts that split frames over
   several payload units (thanks to Derek Kelly for reporting this and helping in
   testing).
+- Modified cFrameDetector::Analyze() to make it process whole frames at once, so
+  that file I/O overhead is minimized during recording (reported by Günter
+  Niedermeier).
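
The entry above sums up the behavioural change: once the detector is synced, cFrameDetector::Analyze() no longer returns after a single 188-byte TS packet, but keeps consuming packets until it reaches the next frame boundary and reports how many bytes it processed up to that point. Below is a minimal sketch of how a caller could use that return value; it is not VDR's actual cRecorder code, and Data, Count and OutputFd are placeholders for whatever buffering the recorder really uses. Only cFrameDetector::Analyze() and TS_SIZE come from remux.h.

#include <unistd.h>
#include "remux.h"

// Hypothetical caller, for illustration only: writes buffered TS data to the
// recording file in chunks that end at frame boundaries.
void WriteWholeFrames(cFrameDetector &FrameDetector, uchar *Data, int Count, int OutputFd)
{
  while (Count >= TS_SIZE) {
        // Once synced, Analyze() scans across several TS packets and stops at
        // the next frame boundary, returning the number of bytes it processed:
        int Processed = FrameDetector.Analyze(Data, Count);
        if (Processed <= 0)
           break;
        // One write() per frame boundary instead of one per 188-byte packet
        // (a real recorder would also check that the detector is synced and
        // handle short writes):
        if (write(OutputFd, Data, Processed) != Processed)
           break;
        Data += Processed;
        Count -= Processed;
        }
}

The number of write() system calls now scales with the number of frames in the recording rather than with the number of TS packets, which is what the commit message means by minimizing file I/O overhead during recording.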

remux.c

@@ -4,7 +4,7 @@
  * See the main source file 'vdr.c' for copyright information and
  * how to reach the author.
  *
- * $Id: remux.c 2.15 2009/03/27 13:32:11 kls Exp $
+ * $Id: remux.c 2.16 2009/03/27 13:49:58 kls Exp $
  */
 
 #include "remux.h"
@@ -697,137 +697,142 @@ static int CmpUint32(const void *p1, const void *p2)
 int cFrameDetector::Analyze(const uchar *Data, int Length)
 {
+  int Processed = 0;
   newFrame = independentFrame = false;
-  if (Length >= TS_SIZE) {
+  while (Length >= TS_SIZE) {
         if (TsHasPayload(Data) && !TsIsScrambled(Data) && TsPid(Data) == pid) {
            if (TsPayloadStart(Data)) {
               if (!frameDuration) {
                  // frame duration unknown, so collect a sequence of PTS values:
                  if (numPtsValues < MaxPtsValues && numIFrames < 2) { // collect a sequence containing at least two I-frames
                     const uchar *Pes = Data + TsPayloadOffset(Data);
                     if (PesHasPts(Pes)) {
                        ptsValues[numPtsValues] = PesGetPts(Pes);
                        // check for rollover:
                        if (numPtsValues && ptsValues[numPtsValues - 1] > 0xF0000000 && ptsValues[numPtsValues] < 0x10000000) {
                           dbgframes("#");
                           numPtsValues = 0;
                           numIFrames = 0;
                           }
                        else
                           numPtsValues++;
                        }
                     }
                  else {
                     // find the smallest PTS delta:
                     qsort(ptsValues, numPtsValues, sizeof(uint32_t), CmpUint32);
                     numPtsValues--;
                     for (int i = 0; i < numPtsValues; i++)
                         ptsValues[i] = ptsValues[i + 1] - ptsValues[i];
                     qsort(ptsValues, numPtsValues, sizeof(uint32_t), CmpUint32);
                     uint32_t Delta = ptsValues[0];
                     // determine frame info:
                     if (isVideo) {
                        if (Delta % 3600 == 0)
                           frameDuration = 3600; // PAL, 25 fps
                        else if (Delta % 3003 == 0)
                           frameDuration = 3003; // NTSC, 29.97 fps
                        else if (Delta == 1501) {
                           frameDuration = 3003; // NTSC, 29.97 fps
                           framesPerPayloadUnit = -2;
                           }
                        else {
                           frameDuration = 3600; // unknown, assuming 25 fps
                           dsyslog("unknown frame duration (%d), assuming 25 fps", Delta);
                           }
                        }
                     else // audio
                        frameDuration = Delta; // PTS of audio frames is always increasing
                     dbgframes("\nframe duration = %d FPS = %5.2f FPPU = %d\n", frameDuration, 90000.0 / frameDuration, framesPerPayloadUnit);
                     }
                  }
               scanner = 0;
               scanning = true;
               }
            if (scanning) {
               int PayloadOffset = TsPayloadOffset(Data);
               if (TsPayloadStart(Data)) {
                  PayloadOffset += PesPayloadOffset(Data + PayloadOffset);
                  if (!framesPerPayloadUnit)
                     framesPerPayloadUnit = framesInPayloadUnit;
                  if (DebugFrames && !synced)
                     dbgframes("/");
                  }
               for (int i = PayloadOffset; i < TS_SIZE; i++) {
                   scanner <<= 8;
                   scanner |= Data[i];
                   switch (type) {
                     case 0x02: // MPEG 2 video
                          if (scanner == 0x00000100) { // Picture Start Code
+                            if (synced && Processed)
+                               return Processed;
                             newFrame = true;
                             independentFrame = ((Data[i + 2] >> 3) & 0x07) == 1; // I-Frame
                             if (synced) {
-                               if (framesPerPayloadUnit <= 1) {
-                                  scanning = false;
-                                  return TS_SIZE;
-                                  }
+                               if (framesPerPayloadUnit <= 1)
+                                  scanning = false;
                                }
                             else {
                                framesInPayloadUnit++;
                                if (independentFrame)
                                   numIFrames++;
                                dbgframes("%d ", (Data[i + 2] >> 3) & 0x07);
                                }
                             scanner = 0;
                             }
                          break;
                     case 0x1B: // MPEG 4 video
                          if (scanner == 0x00000109) { // Access Unit Delimiter
+                            if (synced && Processed)
+                               return Processed;
                             newFrame = true;
                             independentFrame = Data[i + 1] == 0x10;
                             if (synced) {
                                if (framesPerPayloadUnit < 0) {
                                   payloadUnitOfFrame = (payloadUnitOfFrame + 1) % -framesPerPayloadUnit;
                                   if (payloadUnitOfFrame != 0 && independentFrame)
                                      payloadUnitOfFrame = 0;
                                   if (payloadUnitOfFrame)
                                      newFrame = false;
                                   }
-                               if (framesPerPayloadUnit <= 1) {
-                                  scanning = false;
-                                  return TS_SIZE;
-                                  }
+                               if (framesPerPayloadUnit <= 1)
+                                  scanning = false;
                                }
                             else {
                                framesInPayloadUnit++;
                                if (independentFrame)
                                   numIFrames++;
                                dbgframes("%02X ", Data[i + 1]);
                                }
                             scanner = 0;
                             }
                          break;
                     case 0x04: // MPEG audio
                     case 0x06: // AC3 audio
+                         if (synced && Processed)
+                            return Processed;
                          newFrame = true;
                          independentFrame = true;
                          if (synced)
                             scanning = false;
                          else {
                             framesInPayloadUnit = 1;
                             numIFrames++;
                             }
                          break;
                     default: esyslog("ERROR: unknown stream type %d (PID %d) in frame detector", type, pid);
                              pid = 0; // let's just ignore any further data
                     }
                   }
               if (!synced && frameDuration && independentFrame) {
                  synced = true;
                  dbgframes("*");
                  }
               }
            }
-     return TS_SIZE;
-     }
-  return 0;
+        Data += TS_SIZE;
+        Length -= TS_SIZE;
+        Processed += TS_SIZE;
+        }
+  return Processed;
 }
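
For reference, the PTS handling in the diff above can be read in isolation: while the detector is not yet synced, Analyze() collects PTS values from payload-start packets (up to MaxPtsValues, or until it has seen two I-frames), sorts them, and takes the smallest difference between neighbouring values as the frame duration in 90 kHz ticks. The following standalone sketch restates just that step; it is a simplified illustration, not a separate VDR API, and its comparator is merely equivalent in effect to the CmpUint32() in remux.c.

#include <stdint.h>
#include <stdlib.h>

// Ascending-order comparator for qsort().
static int CmpUint32Sketch(const void *p1, const void *p2)
{
  uint32_t a = *(const uint32_t *)p1;
  uint32_t b = *(const uint32_t *)p2;
  return (a > b) - (a < b);
}

// Derives the frame duration (in 90 kHz PTS ticks) from a set of collected PTS
// values. NumPtsValues must be at least 2; the array is modified in place, just
// as Analyze() modifies ptsValues[].
uint32_t FrameDurationFromPts(uint32_t *PtsValues, int NumPtsValues)
{
  qsort(PtsValues, NumPtsValues, sizeof(uint32_t), CmpUint32Sketch);
  NumPtsValues--;
  for (int i = 0; i < NumPtsValues; i++)
      PtsValues[i] = PtsValues[i + 1] - PtsValues[i]; // deltas between neighbouring PTS values
  qsort(PtsValues, NumPtsValues, sizeof(uint32_t), CmpUint32Sketch);
  uint32_t Delta = PtsValues[0]; // smallest delta
  if (Delta % 3600 == 0)
     return 3600;  // PAL, 25 fps
  if (Delta % 3003 == 0)
     return 3003;  // NTSC, 29.97 fps
  if (Delta == 1501)
     return 3003;  // NTSC with frames split over two payload units
  return 3600;     // unknown, assume 25 fps
}

At the 90 kHz PTS clock, 3600 ticks are 1/25 s and 3003 ticks are 1/29.97 s; a smallest delta of 1501 is roughly half an NTSC frame duration, which is why Analyze() sets framesPerPayloadUnit = -2 in that case and treats two payload units as one frame.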