Version 1.7.5

- Fixed a hangup when replaying a TS recording with subtitles activated (reported
  by Timo Helkio).
- Fixed handling the 'new' indicator in the recordings menu for TS recordings
  (thanks to Derek Kelly).
- Added cap_sys_nice to the capabilities that are not dropped (thanks to Rolf
  Ahrenberg).
- Updated the Italian OSD texts (thanks to Diego Pierotto).
- Added cRecordingInfo::GetEvent() (thanks to Marcel Unbehaun).
- Improved synchronizing the progress display, trick modes and subtitle display
  to the actual audio/video. This now works independently of any buffer sizes
  the output device might use.
  + The cBackTrace class has been replaced with cPtsIndex, which keeps track
    of the PTS timestamps of recently played frames.
  + cDevice::GetSTC() is now required to deliver the STC even in trick modes.
    It is sufficient if it returns the PTS of the most recently presented
    audio/video frame (a sketch of such an implementation follows this item).
  + The full-featured DVB cards need an improved firmware in order to return
    proper STC values in trick modes (thanks to Oliver Endriss for enhancing the
    av7110 firmware).
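  A minimal sketch of how an output device can meet this requirement (not VDR's
  actual code; cMyOutputDevice, FramePresented() and lastPresentedPts are made
  up for illustration):

      #include <stdint.h>

      // Hypothetical device class; a real plugin device would derive from cDevice.
      class cMyOutputDevice {
      private:
        int64_t lastPresentedPts;      // PTS of the audio/video frame presented last
      public:
        cMyOutputDevice(void): lastPresentedPts(-1) {}
        // Called by the render loop whenever a frame actually becomes visible/audible:
        void FramePresented(int64_t Pts) { lastPresentedPts = Pts; }
        // Returning the PTS of the most recently presented frame is sufficient,
        // even in trick modes:
        int64_t GetSTC(void) { return lastPresentedPts; }
      };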
- Adapted cFrameDetector::Analyze() to HD NTSC broadcasts that split frames over
  several payload units (thanks to Derek Kelly for reporting this and helping in
  testing).
- Modified cFrameDetector::Analyze() to make it process whole frames at once, so
  that file I/O overhead is minimized during recording (reported by Günter
  Niedermeier).
- Added command line help for the '-i' option.
- Fixed cDvbPlayer::NextFile() to handle files larger than 2GB (thanks to Jose
  Alberto Reguero).
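  A hedged sketch of the general idea (not the actual patch; SeekTo() is a made-up
  helper): offsets into files that may exceed 2GB have to use the 64 bit off_t
  type (with large file support enabled, e.g. -D_FILE_OFFSET_BITS=64) rather than
  a plain int:

      #include <sys/types.h>
      #include <unistd.h>

      // Seek to an absolute position that may lie beyond 2GB:
      static off_t SeekTo(int Fd, off_t Offset)
      {
        return lseek(Fd, Offset, SEEK_SET);
      }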
- Improved replay at the beginning and end of a recording. The very first and the
  very last frame are now sent to the output device repeatedly until GetSTC()
  reports that they have been played. cDvbPlayer::Action() no longer calls
  DeviceFlush() (thanks to Reinhard Nissl for making sure vdr-xine no longer
  needs this).
- Added missing '[]' to the delete operator in cMenuEditStrItem::~cMenuEditStrItem().
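  For illustration only (not the VDR code): memory obtained with new[] must be
  released with delete[]:

      void Example(void)
      {
        char *s = new char[256];   // array allocation ...
        delete[] s;                // ... requires delete[]; plain 'delete s' is undefined behavior
      }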
- Added missing virtual destructor to cPalette.
- Now freeing configDirectory before setting it to a new value in
  cPlugin::SetConfigDirectory().
- Fixed a crash when jumping to an editing mark in an audio recording.
- Fixed the 'VideoOnly' condition in the PlayPes() and PlayTs() calls in
  cDvbPlayer::Action() (thanks to Reinhard Nissl).
- cDevice::PlayTs() now plays as many TS packets as possible in one call.
- Making sure any floating point numbers are written with a decimal point,
  regardless of the current locale (thanks to Oliver Endriss for pointing out a
  problem with the F record in the info file of a recording).
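  One common way to achieve this (a sketch, not necessarily VDR's implementation;
  WriteFrameRate() is a made-up helper): format the number and normalize a locale
  dependent decimal comma to a point:

      #include <stdio.h>

      static void WriteFrameRate(FILE *f, double FramesPerSecond)
      {
        char buf[32];
        snprintf(buf, sizeof(buf), "%.4f", FramesPerSecond);
        for (char *p = buf; *p; p++) {
            if (*p == ',')
               *p = '.';   // some locales format doubles with a decimal comma
            }
        fprintf(f, "F %s\n", buf);   // e.g. the F record in a recording's info file
      }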
- Fixed detecting the frame rate for radio recordings.
- Added missing AUDIO_PAUSE/AUDIO_CONTINUE calls to cDvbDevice (thanks to Oliver
  Endriss).
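  A sketch of the kind of call involved (PauseAudio() is a made-up helper; the
  real call sites are in VDR's dvbdevice.c):

      #include <linux/dvb/audio.h>
      #include <sys/ioctl.h>

      // Pause or resume the audio decoder of a full-featured DVB card:
      static void PauseAudio(int FdAudio, bool Pause)
      {
        ioctl(FdAudio, Pause ? AUDIO_PAUSE : AUDIO_CONTINUE);
      }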
- No longer writing the video type into channels.conf if VPID is 0 (thanks to
  Oliver Endriss for reporting this).
- Improved efficiency of cEIT::cEIT() (thanks to Tobias Bratfisch).
Commit 1aadb31fb3 (parent 084e16c057) by Klaus Schmidinger, 2009-04-12 22:29:35 +02:00
24 changed files with 556 additions and 318 deletions

remux.c (203 changed lines)

@@ -4,7 +4,7 @@
  * See the main source file 'vdr.c' for copyright information and
  * how to reach the author.
  *
- * $Id: remux.c 2.13 2009/01/24 13:44:45 kls Exp $
+ * $Id: remux.c 2.17 2009/04/05 14:07:48 kls Exp $
  */

 #include "remux.h"
@@ -109,6 +109,21 @@ void cRemux::SetBrokenLink(uchar *Data, int Length)
      dsyslog("SetBrokenLink: no video packet in frame");
 }

+// --- Some TS handling tools ------------------------------------------------
+
+int64_t TsGetPts(const uchar *p, int l)
+{
+  // Find the first packet with a PTS and use it:
+  while (l > 0) {
+        const uchar *d = p;
+        if (TsPayloadStart(d) && TsGetPayload(&d) && PesHasPts(d))
+           return PesGetPts(d);
+        p += TS_SIZE;
+        l -= TS_SIZE;
+        }
+  return -1;
+}
+
 // --- cPatPmtGenerator ------------------------------------------------------

 cPatPmtGenerator::cPatPmtGenerator(cChannel *Channel)
@@ -661,111 +676,163 @@ cFrameDetector::cFrameDetector(int Pid, int Type)
 {
   pid = Pid;
   type = Type;
   synced = false;
   newFrame = independentFrame = false;
-  lastPts = 0;
+  numPtsValues = 0;
+  numIFrames = 0;
   isVideo = type == 0x02 || type == 0x1B; // MPEG 2 or MPEG 4
   frameDuration = 0;
-  framesPerPayloadUnit = 0;
+  framesInPayloadUnit = framesPerPayloadUnit = 0;
+  payloadUnitOfFrame = 0;
   scanning = false;
   scanner = 0;
 }

+static int CmpUint32(const void *p1, const void *p2)
+{
+  if (*(uint32_t *)p1 < *(uint32_t *)p2) return -1;
+  if (*(uint32_t *)p1 > *(uint32_t *)p2) return 1;
+  return 0;
+}
+
 int cFrameDetector::Analyze(const uchar *Data, int Length)
 {
+  int Processed = 0;
   newFrame = independentFrame = false;
-  if (Length >= TS_SIZE) {
-     if (TsHasPayload(Data) && !TsIsScrambled(Data) && TsPid(Data) == pid) {
-        if (TsPayloadStart(Data)) {
-           if (!frameDuration) {
-              const uchar *Pes = Data + TsPayloadOffset(Data);
-              if (PesHasPts(Pes)) {
-                 int64_t Pts = PesGetPts(Pes);
-                 if (Pts < lastPts) { // avoid wrapping
-                    lastPts = 0;
-                    framesPerPayloadUnit = 0;
-                    }
-                 if ((!lastPts || !framesPerPayloadUnit) && Pts != lastPts)
-                    lastPts = Pts;
-                 else {
-                    int64_t Delta = Pts - lastPts;
-                    if (isVideo) {
-                       if (Delta % 3600 == 0)
-                          frameDuration = 3600; // PAL, 25 fps
-                       else if (Delta % 3003 == 0)
-                          frameDuration = 3003; // NTSC, 29.97 fps
-                       else {
-                          frameDuration = 3600; // unknown, assuming 25 fps
-                          dsyslog("unknown frame duration, assuming 25 fps (PTS: %lld - %lld = %lld  FPPU = %d)\n", Pts, lastPts, Delta, framesPerPayloadUnit);
-                          }
-                       }
-                    else // audio
-                       frameDuration = Delta; // PTS of audio frames is always increasing
-                    dbgframes("PTS: %lld - %lld = %lld -> FD = %d  FPS = %5.2f  FPPU = %d\n", Pts, lastPts, Delta, frameDuration, 90000.0 / frameDuration, framesPerPayloadUnit);
-                    }
-                 }
-              }
-           scanner = 0;
-           scanning = true;
-           }
-        if (scanning) {
-           int PayloadOffset = TsPayloadOffset(Data);
-           if (TsPayloadStart(Data))
-              PayloadOffset += PesPayloadOffset(Data + PayloadOffset);
-           for (int i = PayloadOffset; i < TS_SIZE; i++) {
-               scanner <<= 8;
-               scanner |= Data[i];
-               switch (type) {
-                 case 0x02: // MPEG 2 video
-                      if (scanner == 0x00000100) { // Picture Start Code
-                         if (frameDuration) {
-                            newFrame = true;
-                            independentFrame = ((Data[i + 2] >> 3) & 0x07) == 1; // I-Frame
-                            if (framesPerPayloadUnit == 1) {
-                               scanning = false;
-                               return TS_SIZE;
-                               }
-                            }
-                         else {
-                            framesPerPayloadUnit++;
-                            dbgframes("%d ", (Data[i + 2] >> 3) & 0x07);
-                            }
-                         scanner = 0;
-                         }
-                      break;
-                 case 0x1B: // MPEG 4 video
-                      if (scanner == 0x00000109) { // Access Unit Delimiter
-                         if (frameDuration) {
-                            newFrame = true;
-                            independentFrame = Data[i + 1] == 0x10;
-                            if (framesPerPayloadUnit == 1) {
-                               scanning = false;
-                               return TS_SIZE;
-                               }
-                            }
-                         else {
-                            framesPerPayloadUnit++;
-                            dbgframes("%02X ", Data[i + 1]);
-                            }
-                         scanner = 0;
-                         }
-                      break;
-                 case 0x04: // MPEG audio
-                 case 0x06: // AC3 audio
-                      if (frameDuration) {
-                         newFrame = true;
-                         independentFrame = true;
-                         scanning = false;
-                         }
-                      else
-                         framesPerPayloadUnit = 1;
-                      break;
-                 default: esyslog("ERROR: unknown stream type %d (PID %d) in frame detector", type, pid);
-                          pid = 0; // let's just ignore any further data
-                 }
-               }
-           if (!synced && frameDuration && independentFrame) {
-              synced = true;
-              dbgframes("*");
-              }
-           }
-        }
-     return TS_SIZE;
-     }
-  return 0;
+  while (Length >= TS_SIZE) {
+        if (TsHasPayload(Data) && !TsIsScrambled(Data) && TsPid(Data) == pid) {
+           if (TsPayloadStart(Data)) {
+              if (!frameDuration) {
+                 // frame duration unknown, so collect a sequence of PTS values:
+                 if (numPtsValues < MaxPtsValues && numIFrames < 2) { // collect a sequence containing at least two I-frames
+                    const uchar *Pes = Data + TsPayloadOffset(Data);
+                    if (PesHasPts(Pes)) {
+                       ptsValues[numPtsValues] = PesGetPts(Pes);
+                       // check for rollover:
+                       if (numPtsValues && ptsValues[numPtsValues - 1] > 0xF0000000 && ptsValues[numPtsValues] < 0x10000000) {
+                          dbgframes("#");
+                          numPtsValues = 0;
+                          numIFrames = 0;
+                          }
+                       else
+                          numPtsValues++;
+                       }
+                    }
+                 else {
+                    // find the smallest PTS delta:
+                    qsort(ptsValues, numPtsValues, sizeof(uint32_t), CmpUint32);
+                    numPtsValues--;
+                    for (int i = 0; i < numPtsValues; i++)
+                        ptsValues[i] = ptsValues[i + 1] - ptsValues[i];
+                    qsort(ptsValues, numPtsValues, sizeof(uint32_t), CmpUint32);
+                    uint32_t Delta = ptsValues[0];
+                    // determine frame info:
+                    if (isVideo) {
+                       if (Delta % 3600 == 0)
+                          frameDuration = 3600; // PAL, 25 fps
+                       else if (Delta % 3003 == 0)
+                          frameDuration = 3003; // NTSC, 29.97 fps
+                       else if (Delta == 1501) {
+                          frameDuration = 3003; // NTSC, 29.97 fps
+                          framesPerPayloadUnit = -2;
+                          }
+                       else {
+                          frameDuration = 3600; // unknown, assuming 25 fps
+                          dsyslog("unknown frame duration (%d), assuming 25 fps", Delta);
+                          }
+                       }
+                    else // audio
+                       frameDuration = Delta; // PTS of audio frames is always increasing
+                    dbgframes("\nframe duration = %d  FPS = %5.2f  FPPU = %d\n", frameDuration, 90000.0 / frameDuration, framesPerPayloadUnit);
+                    }
+                 }
+              scanner = 0;
+              scanning = true;
+              }
+           if (scanning) {
+              int PayloadOffset = TsPayloadOffset(Data);
+              if (TsPayloadStart(Data)) {
+                 PayloadOffset += PesPayloadOffset(Data + PayloadOffset);
+                 if (!framesPerPayloadUnit)
+                    framesPerPayloadUnit = framesInPayloadUnit;
+                 if (DebugFrames && !synced)
+                    dbgframes("/");
+                 }
+              for (int i = PayloadOffset; scanning && i < TS_SIZE; i++) {
+                  scanner <<= 8;
+                  scanner |= Data[i];
+                  switch (type) {
+                    case 0x02: // MPEG 2 video
+                         if (scanner == 0x00000100) { // Picture Start Code
+                            if (synced && Processed)
+                               return Processed;
+                            newFrame = true;
+                            independentFrame = ((Data[i + 2] >> 3) & 0x07) == 1; // I-Frame
+                            if (synced) {
+                               if (framesPerPayloadUnit <= 1)
+                                  scanning = false;
+                               }
+                            else {
+                               framesInPayloadUnit++;
+                               if (independentFrame)
+                                  numIFrames++;
+                               dbgframes("%d ", (Data[i + 2] >> 3) & 0x07);
+                               }
+                            scanner = 0;
+                            }
+                         break;
+                    case 0x1B: // MPEG 4 video
+                         if (scanner == 0x00000109) { // Access Unit Delimiter
+                            if (synced && Processed)
+                               return Processed;
+                            newFrame = true;
+                            independentFrame = Data[i + 1] == 0x10;
+                            if (synced) {
+                               if (framesPerPayloadUnit < 0) {
+                                  payloadUnitOfFrame = (payloadUnitOfFrame + 1) % -framesPerPayloadUnit;
+                                  if (payloadUnitOfFrame != 0 && independentFrame)
+                                     payloadUnitOfFrame = 0;
+                                  if (payloadUnitOfFrame)
+                                     newFrame = false;
+                                  }
+                               if (framesPerPayloadUnit <= 1)
+                                  scanning = false;
+                               }
+                            else {
+                               framesInPayloadUnit++;
+                               if (independentFrame)
+                                  numIFrames++;
+                               dbgframes("%02X ", Data[i + 1]);
+                               }
+                            scanner = 0;
+                            }
+                         break;
+                    case 0x04: // MPEG audio
+                    case 0x06: // AC3 audio
+                         if (synced && Processed)
+                            return Processed;
+                         newFrame = true;
+                         independentFrame = true;
+                         if (!synced) {
+                            framesInPayloadUnit = 1;
+                            if (TsPayloadStart(Data))
+                               numIFrames++;
+                            }
+                         scanning = false;
+                         break;
+                    default: esyslog("ERROR: unknown stream type %d (PID %d) in frame detector", type, pid);
+                             pid = 0; // let's just ignore any further data
+                    }
+                  }
+              if (!synced && frameDuration && independentFrame) {
+                 synced = true;
+                 dbgframes("*");
+                 }
+              }
+           }
+        Data += TS_SIZE;
+        Length -= TS_SIZE;
+        Processed += TS_SIZE;
+        }
+  return Processed;
 }
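
The frame duration detection added above boils down to: collect a handful of PTS
values, sort them, take the differences between neighbors, and use the smallest
difference as the frame duration in 90 kHz clock ticks. A standalone illustration
with made-up values (not part of the commit):

    #include <stdint.h>
    #include <stdio.h>
    #include <stdlib.h>

    static int CmpUint32(const void *p1, const void *p2)
    {
      if (*(uint32_t *)p1 < *(uint32_t *)p2) return -1;
      if (*(uint32_t *)p1 > *(uint32_t *)p2) return 1;
      return 0;
    }

    int main(void)
    {
      uint32_t pts[] = { 97200, 90000, 100800, 93600 };  // PAL frames, not in presentation order
      int n = sizeof(pts) / sizeof(pts[0]);
      qsort(pts, n, sizeof(uint32_t), CmpUint32);         // sort the collected PTS values
      for (int i = 0; i < n - 1; i++)
          pts[i] = pts[i + 1] - pts[i];                   // differences between neighbors
      qsort(pts, n - 1, sizeof(uint32_t), CmpUint32);     // smallest difference first
      printf("frame duration = %u -> %.2f fps\n", pts[0], 90000.0 / pts[0]);  // 3600 -> 25.00 fps
      return 0;
    }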