mirror of https://github.com/rofafor/vdr-plugin-femon.git synced 2023-10-10 13:36:53 +02:00

Fixed bitstream parser for H.264.

Rolf Ahrenberg 2009-08-26 16:24:49 +03:00
parent a06ae6f6ff
commit 61ff96556e
4 changed files with 180 additions and 181 deletions

README
View File

@@ -26,9 +26,7 @@ transponder and stream information are also available in advanced display modes.
The plugin is based on a neat console frontend status monitor application
called 'femon' by Johannes Stezenbach (see DVB-apps/szap/femon.c for further
-information). The bitrate calculation trick originates from the 'dvbstream'
-application by Dave Chapman and the H.264 parsing routines are taken from
-vdr-xineliboutput plugin by Petri Hintukainen.
+information).

Terminology:

femonh264.c
View File

@@ -10,23 +10,7 @@
#include "femontools.h"
#include "femonh264.h"

-#define NAL_SEI 0x06     // Supplemental Enhancement Information
-#define NAL_SPS 0x07     // Sequence Parameter Set
-#define NAL_AUD 0x09     // Access Unit Delimiter
-#define NAL_END_SEQ 0x0A // End of Sequence
-
-#define IS_NAL_SEI(buf)     (((buf)[0] == 0x00) && ((buf)[1] == 0x00) && ((buf)[2] == 0x01) && ((buf)[3] == NAL_SEI))
-#define IS_NAL_SPS(buf)     (((buf)[0] == 0x00) && ((buf)[1] == 0x00) && ((buf)[2] == 0x01) && ((buf)[3] == NAL_SPS))
-#define IS_NAL_AUD(buf)     (((buf)[0] == 0x00) && ((buf)[1] == 0x00) && ((buf)[2] == 0x01) && ((buf)[3] == NAL_AUD))
-#define IS_NAL_END_SEQ(buf) (((buf)[0] == 0x00) && ((buf)[1] == 0x00) && ((buf)[2] == 0x01) && ((buf)[3] == NAL_END_SEQ))
-
-// Picture types
-#define NO_PICTURE 0
-#define I_FRAME 1
-#define P_FRAME 2
-#define B_FRAME 3
-
-static const eVideoAspectRatio aspect_ratios[] =
+const eVideoAspectRatio cFemonH264::s_AspectRatios[] =
{
  VIDEO_ASPECT_RATIO_INVALID,
  VIDEO_ASPECT_RATIO_1_1,
@@ -47,7 +31,7 @@ static const eVideoAspectRatio aspect_ratios[] =
  VIDEO_ASPECT_RATIO_2_1
};

-static const eVideoFormat video_formats[] =
+const eVideoFormat cFemonH264::s_VideoFormats[] =
{
  VIDEO_FORMAT_COMPONENT,
  VIDEO_FORMAT_PAL,
@@ -58,24 +42,126 @@ static const eVideoFormat video_formats[] =
  VIDEO_FORMAT_RESERVED
};

-typedef struct {
-  int width;
-  int height;
-  eVideoAspectRatio aspect_ratio;
-  eVideoFormat format;
-} h264_sps_data_t;
-
-typedef struct {
-  double frame_rate;
-  double bitrate;
-  eVideoScan scan;
-} h264_sei_data_t;
-
-static bool h264_parse_sps(const uint8_t *buf, int len, h264_sps_data_t *sps)
-{
-  int profile_idc, pic_order_cnt_type;
-  int frame_mbs_only;
-  int i, j;
+cFemonH264::cFemonH264(cFemonVideoIf *videohandler)
+: m_VideoHandler(videohandler)
+{
+}
+
+cFemonH264::~cFemonH264()
+{
+}
+
+bool cFemonH264::processVideo(const uint8_t *buf, int len)
+{
+  bool aud_found = false, sps_found = false, sei_found = true; // sei currently disabled
+
+  if (!m_VideoHandler)
+     return false;
+
+  // skip PES header
+  if (!PesLongEnough(len))
+     return false;
+  buf += PesPayloadOffset(buf);
+
+  const uint8_t *start = buf;
+  const uint8_t *end = start + len;
+  uint8_t nal_data[len];
+
+  for (;;) {
+      int consumed = 0;
+
+      buf = nextStartCode(buf, end);
+      if (buf >= end)
+         break;
+
+      switch (buf[3] & 0x1F) {
+        case NAL_AUD:
+             if (!aud_found) {
+                switch (buf[4] >> 5) {
+                  case 0: case 3: case 5: // I_FRAME
+                       //Dprintf("H.264: Found NAL AUD at offset %d/%d", buf - start, len);
+                       m_VideoHandler->SetVideoCodec(VIDEO_CODEC_H264);
+                       aud_found = true;
+                       break;
+                  case 1: case 4: case 6: // P_FRAME;
+                  case 2: case 7:         // B_FRAME;
+                  default:                // NO_PICTURE;
+                       break;
+                  }
+                }
+             break;
+        case NAL_SPS:
+             if (!sps_found) {
+                //Dprintf("H.264: Found NAL SPS at offset %d/%d", buf - start, len);
+                int nal_len = nalUnescape(nal_data, buf + 4, end - buf - 4);
+                consumed = parseSPS(nal_data, nal_len);
+                if (consumed > 0)
+                   sps_found = true;
+                }
+             break;
+        case NAL_SEI:
+             if (!sei_found) {
+                //Dprintf("H.264: Found NAL SEI at offset %d/%d", buf - start, len);
+                int nal_len = nalUnescape(nal_data, buf + 4, end - buf - 4);
+                consumed = parseSEI(nal_data, nal_len);
+                if (consumed > 0)
+                   sei_found = true;
+                }
+             break;
+        default:
+             break;
+        }
+
+      if (aud_found && sps_found && sei_found)
+         break;
+
+      buf += consumed + 4;
+      }
+
+  return aud_found;
+}
+
+const uint8_t *cFemonH264::nextStartCode(const uint8_t *start, const uint8_t *end)
+{
+  for (end -= 3; start < end; ++start) {
+      if ((start[0] == 0x00) && (start[1] == 0x00) && (start[2] == 0x01))
+         return start;
+      }
+  return (end + 3);
+}
+
+int cFemonH264::nalUnescape(uint8_t *dst, const uint8_t *src, int len)
+{
+  int s = 0, d = 0;
+
+  while (s < len) {
+        if (!src[s] && !src[s + 1]) {
+           // hit 00 00 xx
+           dst[d] = dst[d + 1] = 0;
+           s += 2;
+           d += 2;
+           if (src[s] == 3) {
+              s++; // 00 00 03 xx --> 00 00 xx
+              if (s >= len)
+                 return d;
+              }
+           }
+        dst[d++] = src[s++];
+        }
+
+  return d;
+}
+
+int cFemonH264::parseSPS(const uint8_t *buf, int len)
+{
+  int profile_idc, pic_order_cnt_type, frame_mbs_only, i, j;
+  unsigned int width = 0, height = 0;
+  eVideoAspectRatio aspect_ratio = VIDEO_ASPECT_RATIO_INVALID;
+  eVideoFormat format = VIDEO_FORMAT_INVALID;

  cBitStream bs(buf, len);

  profile_idc = bs.getU8();
@@ -118,16 +204,16 @@ static bool h264_parse_sps(const uint8_t *buf, int len, h264_sps_data_t *sps)
     }
  bs.skipUeGolomb(); // ref_frames
  bs.skipBit();      // gaps_in_frame_num_allowed
-  sps->width = bs.getUeGolomb() + 1;  // mbs
-  sps->height = bs.getUeGolomb() + 1; // mbs
+  width = bs.getUeGolomb() + 1;  // mbs
+  height = bs.getUeGolomb() + 1; // mbs
  frame_mbs_only = bs.getBit();
-  //Dprintf("H.264 SPS: pic_width: %u mbs", (unsigned int)sps->width);
-  //Dprintf("H.264 SPS: pic_height: %u mbs", (unsigned int)sps->height);
+  //Dprintf("H.264 SPS: pic_width: %u mbs", width);
+  //Dprintf("H.264 SPS: pic_height: %u mbs", height);
  //Dprintf("H.264 SPS: frame only flag: %d", frame_mbs_only);
-  sps->width *= 16;
-  sps->height *= 16 * (2 - frame_mbs_only);
+  width *= 16;
+  height *= 16 * (2 - frame_mbs_only);

  if (!frame_mbs_only) {
     if (bs.getBit()) { // mb_adaptive_frame_field_flag
@@ -143,16 +229,14 @@ static bool h264_parse_sps(const uint8_t *buf, int len, h264_sps_data_t *sps)
     uint32_t crop_bottom = bs.getUeGolomb();
     //Dprintf("H.264 SPS: cropping %d %d %d %d", crop_left, crop_top, crop_right, crop_bottom);
-    sps->width -= 2 * (crop_left + crop_right);
+    width -= 2 * (crop_left + crop_right);
     if (frame_mbs_only)
-       sps->height -= 2 * (crop_top + crop_bottom);
+       height -= 2 * (crop_top + crop_bottom);
     else
-       sps->height -= 4 * (crop_top + crop_bottom);
+       height -= 4 * (crop_top + crop_bottom);
     }

  // VUI parameters
-  sps->aspect_ratio = VIDEO_ASPECT_RATIO_INVALID;
-  sps->format = VIDEO_FORMAT_INVALID;
  if (bs.getBit()) { // vui_parameters_present_flag
     if (bs.getBit()) { // aspect_ratio_info_present
        uint32_t aspect_ratio_idc = bs.getU8();
@@ -160,41 +244,44 @@ static bool h264_parse_sps(const uint8_t *buf, int len, h264_sps_data_t *sps)
        if (aspect_ratio_idc == 255) { // extended sar
           bs.skipBit(); // sar_width
           bs.skipBit(); // sar_height
-          sps->aspect_ratio = VIDEO_ASPECT_RATIO_EXTENDED;
+          aspect_ratio = VIDEO_ASPECT_RATIO_EXTENDED;
           //Dprintf("H.264 SPS: aspect ratio extended");
           }
-       else if (aspect_ratio_idc < sizeof(aspect_ratios) / sizeof(aspect_ratios[0])) {
-          sps->aspect_ratio = aspect_ratios[aspect_ratio_idc];
-          //Dprintf("H.264 SPS: -> aspect ratio %d", sps->aspect_ratio);
+       else if (aspect_ratio_idc < sizeof(s_AspectRatios) / sizeof(s_AspectRatios[0])) {
+          aspect_ratio = s_AspectRatios[aspect_ratio_idc];
+          //Dprintf("H.264 SPS: -> aspect ratio %d", aspect_ratio);
          }
        }
     if (bs.getBit()) // overscan_info_present_flag
        bs.skipBit(); // overscan_approriate_flag
     if (bs.getBit()) { // video_signal_type_present_flag
        uint32_t video_format = bs.getBits(3);
-       if (video_format < sizeof(video_formats) / sizeof(video_formats[0])) {
-          sps->format = video_formats[video_format];
-          //Dprintf("H.264 SPS: -> video format %d", sps->format);
+       if (video_format < sizeof(s_VideoFormats) / sizeof(s_VideoFormats[0])) {
+          format = s_VideoFormats[video_format];
+          //Dprintf("H.264 SPS: -> video format %d", format);
          }
        }
     }

-  //Dprintf("H.264 SPS: -> video size %dx%d, aspect %d", sps->width, sps->height, sps->aspect_ratio);
+  //Dprintf("H.264 SPS: -> video size %dx%d, aspect %d", width, height, aspect_ratio);

-  if (bs.isEOF()) {
-     //Dprintf("H.264 SPS: not enough data ?");
-     return false;
+  if (m_VideoHandler) {
+     m_VideoHandler->SetVideoFormat(format);
+     m_VideoHandler->SetVideoSize(width, height);
+     m_VideoHandler->SetVideoAspectRatio(aspect_ratio);
     }

-  return true;
+  return (bs.getIndex() / 8);
}

-static bool h264_parse_sei(const uint8_t *buf, int len, h264_sei_data_t *sei)
+int cFemonH264::parseSEI(const uint8_t *buf, int len)
{
  int num_referenced_subseqs, i;
+  double frame_rate = 0, bit_rate = 0;
+  eVideoScan scan = VIDEO_SCAN_INVALID;

  cBitStream bs(buf, len);

-  while (!bs.isEOF()) { // sei_message
+  while ((bs.getIndex() * 8 + 16) < len) { // sei_message
     int lastByte, payloadSize = 0, payloadType = 0;

     // last_payload_type_byte
@@ -214,16 +301,16 @@ static bool h264_parse_sei(const uint8_t *buf, int len, h264_sei_data_t *sei)
        // ...
        // switch (bs.getBits(2)) { // ct_type
        // case 0:
-       // sei->scan = VIDEO_SCAN_PROGRESSIVE;
+       // scan = VIDEO_SCAN_PROGRESSIVE;
        // break;
        // case 1:
-       // sei->scan = VIDEO_SCAN_INTERLACED;
+       // scan = VIDEO_SCAN_INTERLACED;
        // break;
        // case 2:
-       // sei->scan = VIDEO_SCAN_UNKNOWN;
+       // scan = VIDEO_SCAN_UNKNOWN;
        // break;
        // default:
-       // sei->scan = VIDEO_SCAN_RESERVED;
+       // scan = VIDEO_SCAN_RESERVED;
        // break;
        // }
        // break;
@@ -235,8 +322,8 @@ static bool h264_parse_sei(const uint8_t *buf, int len, h264_sei_data_t *sei)
        bs.skipBits(32); // sub_seq_duration
        if (bs.getBit()) { // average_rate_flag
           bs.skipBit(); // accurate_statistics_flag
-          sei->bitrate = bs.getU16(); // average_bit_rate
-          sei->frame_rate = bs.getU16(); // average_frame_rate
+          bit_rate = bs.getU16(); // average_bit_rate
+          frame_rate = bs.getU16(); // average_frame_rate
           //Dprintf("H.264 SEI: -> stream bitrate %.1f, frame rate %.1f", sei->bitrate, sei->frame_rate);
           }
        num_referenced_subseqs = bs.getUeGolomb(); // num_referenced_subseqs
@@ -248,7 +335,7 @@ static bool h264_parse_sei(const uint8_t *buf, int len, h264_sei_data_t *sei)
             break;
        default:
-            bs.skipBits(payloadSize);
+            bs.skipBits(payloadSize * 8);
             break;
        }
@@ -256,113 +343,11 @@ static bool h264_parse_sei(const uint8_t *buf, int len, h264_sei_data_t *sei)
        bs.byteAlign();
        }

-  return true;
-}
-
-static int h264_nal_unescape(uint8_t *dst, const uint8_t *src, int len)
-{
-  int s = 0, d = 0;
-
-  while (s < len) {
-        if (!src[s] && !src[s + 1]) {
-           // hit 00 00 xx
-           dst[d] = dst[d + 1] = 0;
-           s += 2;
-           d += 2;
-           if (src[s] == 3) {
-              s++; // 00 00 03 xx --> 00 00 xx
-              if (s >= len)
-                 return d;
-              }
-           }
-        dst[d++] = src[s++];
-        }
-
-  return d;
-}
-
-static int h264_get_picture_type(const uint8_t *buf, int len)
-{
-  for (int i = 0; i < (len - 5); ++i) {
-      if (buf[i] == 0 && buf[i + 1] == 0 && buf[i + 2] == 1 && buf[i + 3] == NAL_AUD) {
-         uint8_t type = (uint8_t)(buf[i + 4] >> 5);
-         switch (type) {
-           case 0: case 3: case 5: return I_FRAME;
-           case 1: case 4: case 6: return P_FRAME;
-           case 2: case 7:         return B_FRAME;
-           default:;
-           }
-         }
-      }
-  return NO_PICTURE;
-}
-
-cFemonH264::cFemonH264(cFemonVideoIf *videohandler)
-: m_VideoHandler(videohandler)
-{
-}
-
-cFemonH264::~cFemonH264()
-{
-}
-
-bool cFemonH264::processVideo(const uint8_t *buf, int len)
-{
-  bool sps_found = false, sei_found = true; // sei currently disabled
-
-  if (!m_VideoHandler)
-     return false;
-
-  // skip PES header
-  if (!PesLongEnough(len))
-     return false;
-  buf += PesPayloadOffset(buf);
-
-  // H.264 detection, search for NAL AUD
-  if (!IS_NAL_AUD(buf))
-     return false;
-
-  // If I-frame, search for NAL SPS
-  if (h264_get_picture_type(buf, len) != I_FRAME)
-     return false;
-
-  m_VideoHandler->SetVideoCodec(VIDEO_CODEC_H264);
-
-  // Scan video packet ...
-  for (int i = 5; i < len - 4; i++) {
-      // ... for sequence parameter set
-      if (!sps_found && (buf[i] == 0x00) && (buf[i + 1] == 0x00) && (buf[i + 2] == 0x01) && (buf[i + 3] & 0x1f) == NAL_SPS) {
-         uint8_t nal_data[len];
-         int nal_len;
-         //Dprintf("H.264: Found NAL SPS at offset %d/%d", i, len);
-         if (0 < (nal_len = h264_nal_unescape(nal_data, buf + i + 4, len - i - 4))) {
-            h264_sps_data_t sps = { 0, 0, VIDEO_ASPECT_RATIO_INVALID, VIDEO_FORMAT_INVALID };
-            if (h264_parse_sps(nal_data, nal_len, &sps)) {
-               m_VideoHandler->SetVideoFormat(sps.format);
-               m_VideoHandler->SetVideoSize(sps.width, sps.height);
-               m_VideoHandler->SetVideoAspectRatio(sps.aspect_ratio);
-               sps_found = true;
-               }
-            }
-         }
-      // ... for supplemental enhancement information
-      if (!sei_found && (buf[i] == 0x00) && (buf[i + 1] == 0x00) && (buf[i + 2] == 0x01) && (buf[i + 3] & 0x1f) == NAL_SEI) {
-         uint8_t nal_data[len];
-         int nal_len;
-         //Dprintf("H.264: Found NAL SEI at offset %d/%d", i, len);
-         if (0 < (nal_len = h264_nal_unescape(nal_data, buf + i + 4, len - i - 4))) {
-            h264_sei_data_t sei = { 0, 0, VIDEO_SCAN_INVALID };
-            if (h264_parse_sei(nal_data, nal_len, &sei)) {
-               m_VideoHandler->SetVideoFramerate(sei.frame_rate);
-               m_VideoHandler->SetVideoBitrate(sei.bitrate);
-               m_VideoHandler->SetVideoScan(sei.scan);
-               sei_found = true;
-               }
-            }
-         }
-      if (sps_found && sei_found)
-         break;
-      }
-
-  return true;
+  if (m_VideoHandler) {
+     m_VideoHandler->SetVideoFramerate(frame_rate);
+     m_VideoHandler->SetVideoBitrate(bit_rate);
+     m_VideoHandler->SetVideoScan(scan);
+     }
+
+  return (bs.getIndex() / 8);
}
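The rewritten parser above drops the old IS_NAL_* macros and the h264_get_picture_type() helper in favour of a single scan loop: find each 00 00 01 start code, classify the NAL unit by the low five bits of the byte that follows, and strip the 00 00 03 emulation-prevention bytes before any bit-level parsing. The standalone sketch below illustrates that scheme only; the helper names (findStartCode, unescapeNal) and the sample buffer are made up for illustration and are not the plugin's API.

#include <cstdint>
#include <cstdio>
#include <vector>

// Return the offset of the next 00 00 01 start code, or len if none is found.
static int findStartCode(const uint8_t *buf, int len, int pos)
{
  for (; pos + 2 < len; ++pos) {
      if (buf[pos] == 0x00 && buf[pos + 1] == 0x00 && buf[pos + 2] == 0x01)
         return pos;
      }
  return len;
}

// Copy src to dst while turning 00 00 03 back into 00 00 (RBSP unescaping).
static int unescapeNal(uint8_t *dst, const uint8_t *src, int len)
{
  int s = 0, d = 0;
  while (s < len) {
        if (s + 2 < len && src[s] == 0x00 && src[s + 1] == 0x00 && src[s + 2] == 0x03) {
           dst[d++] = 0x00;
           dst[d++] = 0x00;
           s += 3; // drop the emulation-prevention byte
           }
        else
           dst[d++] = src[s++];
        }
  return d;
}

int main(void)
{
  // 00 00 01 start code, NAL header 0x67 (0x67 & 0x1F == 7, an SPS), then a
  // payload containing one escaped 00 00 03 sequence.
  const uint8_t es[] = { 0x00, 0x00, 0x01, 0x67, 0x42, 0x00, 0x00, 0x03, 0x01, 0x8F };
  int len = (int)sizeof(es);
  int pos = findStartCode(es, len, 0);
  if (pos + 3 < len) {
     int nalType = es[pos + 3] & 0x1F;
     std::vector<uint8_t> rbsp(len);
     int rbspLen = unescapeNal(rbsp.data(), es + pos + 4, len - pos - 4);
     std::printf("NAL type %d, %d unescaped payload bytes\n", nalType, rbspLen);
     }
  return 0;
}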

femonh264.h
View File

@@ -12,8 +12,23 @@
class cFemonH264 {
private:
+  enum {
+    NAL_SEI     = 0x06, // Supplemental Enhancement Information
+    NAL_SPS     = 0x07, // Sequence Parameter Set
+    NAL_AUD     = 0x09, // Access Unit Delimiter
+    NAL_END_SEQ = 0x0A  // End of Sequence
+  };
+
  cFemonVideoIf *m_VideoHandler;
+
+  const uint8_t *nextStartCode(const uint8_t *start, const uint8_t *end);
+  int nalUnescape(uint8_t *dst, const uint8_t *src, int len);
+  int parseSPS(const uint8_t *buf, int len);
+  int parseSEI(const uint8_t *buf, int len);
+
+  static const eVideoAspectRatio s_AspectRatios[];
+  static const eVideoFormat s_VideoFormats[];

public:
  cFemonH264(cFemonVideoIf *videohandler);
  virtual ~cFemonH264();
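The two static tables declared here are indexed directly by values parsed out of the SPS VUI: aspect_ratio_idc selects from s_AspectRatios and video_format from s_VideoFormats, each guarded by a sizeof-based bounds check as seen in parseSPS() above (the value 255 is handled separately there as "extended SAR" before the lookup). A tiny illustration of that bounds-checked lookup, using a hypothetical, truncated table rather than femon's real eVideoAspectRatio values:

#include <cstdio>

// Hypothetical, shortened stand-in for the plugin's aspect ratio enumeration.
enum AspectRatio { AR_INVALID, AR_1_1, AR_12_11, AR_10_11 };

static const AspectRatio kAspectRatios[] = { AR_INVALID, AR_1_1, AR_12_11, AR_10_11 };

// Indices outside the table simply fall back to AR_INVALID in this sketch.
static AspectRatio lookupAspectRatio(unsigned int idc)
{
  if (idc < sizeof(kAspectRatios) / sizeof(kAspectRatios[0]))
     return kAspectRatios[idc];
  return AR_INVALID;
}

int main(void)
{
  std::printf("idc 1 -> %d, idc 200 -> %d\n", lookupAspectRatio(1), lookupAspectRatio(200));
  return 0;
}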

femontools.h
View File

@@ -95,6 +95,7 @@ public:
  uint32_t getU32() { return ((getBits(8) << 24) | (getBits(8) << 16) | (getBits(8) << 8) | getBits(8)); }
  bool isEOF() { return (index >= count); }
  void reset() { index = 0; }
+  int getIndex() { return (isEOF() ? count : index); }
};

#endif // __FEMONTOOLS_H
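The added getIndex() accessor is what lets the rewritten parseSPS() and parseSEI() report how many bytes they consumed (getIndex() / 8) instead of a plain boolean. The reduced bit reader below, assuming MSB-first bit order and standard unsigned Exp-Golomb (ue(v)) coding, illustrates the idea; it is a sketch, not the plugin's cBitStream implementation.

#include <cstdint>
#include <cstdio>

class BitReader {
private:
  const uint8_t *data;
  int count; // total number of bits available
  int index; // current read position, in bits
public:
  BitReader(const uint8_t *buf, int len) : data(buf), count(len * 8), index(0) {}
  bool isEOF() const { return index >= count; }
  int getBit()
  {
    if (isEOF())
       return 0; // simplistic overrun handling for the sketch
    int bit = (data[index >> 3] >> (7 - (index & 7))) & 1;
    ++index;
    return bit;
  }
  uint32_t getBits(int n) // read n bits, MSB first
  {
    uint32_t value = 0;
    while (n-- > 0)
          value = (value << 1) | getBit();
    return value;
  }
  uint32_t getUeGolomb() // unsigned Exp-Golomb, ue(v)
  {
    int leadingZeros = 0;
    while (!isEOF() && !getBit())
          ++leadingZeros;
    return (1u << leadingZeros) - 1 + getBits(leadingZeros);
  }
  int getIndex() const { return isEOF() ? count : index; }
};

int main(void)
{
  // 0x28 = 00101000b: the leading "00101" decodes as ue(v) = 4; reading the
  // remaining three bits then leaves the index at a whole byte boundary.
  const uint8_t buf[] = { 0x28 };
  BitReader br(buf, (int)sizeof(buf));
  uint32_t ue = br.getUeGolomb();
  br.getBits(3);
  std::printf("ue(v) = %u, bits consumed = %d, bytes consumed = %d\n",
              ue, br.getIndex(), br.getIndex() / 8);
  return 0;
}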