Mirror of https://github.com/VDR4Arch/vdr.git (synced 2023-10-10 13:36:52 +02:00)

Revoked the changes to cFrameDetector that have been introduced in version 1.7.19
Parent: 6c110a7272
Commit: 96061c5b40

HISTORY: 3 changed lines

@@ -6699,7 +6699,7 @@ Video Disk Recorder Revision History
   constructor is negative (avoids the "cTimeMs: using monotonic clock..." log message
   before VDR's starting log message).
 
-2011-08-27: Version 1.7.21
+2011-09-04: Version 1.7.21
 
 - Fixed detecting frames for channels that split frames into several payloads
   (reported by Derek Kelly).
@@ -6738,3 +6738,4 @@ Video Disk Recorder Revision History
   receiving on SD FF devices and uses the device only for output (thanks to Udo
   Richter).
 - Fixed detecting frames on radio channels (reported by Chris Mayo).
+- Revoked the changes to cFrameDetector that have been introduced in version 1.7.19.

recorder.c: 50 changed lines

@@ -4,7 +4,7 @@
  * See the main source file 'vdr.c' for copyright information and
  * how to reach the author.
  *
- * $Id: recorder.c 2.14 2011/08/13 14:56:36 kls Exp $
+ * $Id: recorder.c 2.15 2011/09/04 09:26:44 kls Exp $
  */
 
 #include "recorder.h"
@@ -31,7 +31,7 @@ cRecorder::cRecorder(const char *FileName, const cChannel *Channel, int Priority
 
   SpinUpDisk(FileName);
 
-  ringBuffer = new cRingBufferLinear(RECORDERBUFSIZE, TS_SIZE, true, "Recorder");
+  ringBuffer = new cRingBufferLinear(RECORDERBUFSIZE, MIN_TS_PACKETS_FOR_FRAME_DETECTOR * TS_SIZE, true, "Recorder");
   ringBuffer->SetTimeouts(0, 100);
 
   int Pid = Channel->Vpid();
@@ -88,7 +88,7 @@ bool cRecorder::RunningLowOnDiskSpace(void)
 
 bool cRecorder::NextFile(void)
 {
-  if (recordFile) {
+  if (recordFile && frameDetector->IndependentFrame()) { // every file shall start with an independent frame
      if (fileSize > MEGABYTE(off_t(Setup.MaxVideoFileSize)) || RunningLowOnDiskSpace()) {
        recordFile = fileName->NextFile();
        fileSize = 0;
@@ -119,11 +119,6 @@ void cRecorder::Action(void)
   time_t t = time(NULL);
   bool InfoWritten = false;
   bool FirstIframeSeen = false;
-#define BUFFERSIZE (5 * TS_SIZE)
-  bool Buffering = false;
-  int BufferIndex = 0;
-  int MaxBufferIndex = 0;
-  uchar *Buffer = NULL;
   while (Running()) {
         int r;
         uchar *b = ringBuffer->Get(r);
@@ -144,34 +139,9 @@ void cRecorder::Action(void)
                       }
                    InfoWritten = true;
                    }
-                 if (frameDetector->NewPayload()) { // We're at the first TS packet of a new payload...
-                    if (Buffering)
-                       esyslog("ERROR: encountered new payload while buffering - dropping some data!");
-                    if (!frameDetector->NewFrame()) { // ...but the frame type is yet unknown, so we need to buffer packets until we see the frame type
-                       if (!Buffer) {
-                          dsyslog("frame type not in first packet of payload - buffering");
-                          if (!(Buffer = MALLOC(uchar, BUFFERSIZE))) {
-                             esyslog("ERROR: can't allocate frame type buffer");
-                             break;
-                             }
-                          }
-                       BufferIndex = 0;
-                       Buffering = true;
-                       }
-                    }
-                 else if (frameDetector->NewFrame()) // now we know the frame type, so stop buffering
-                    Buffering = false;
-                 if (Buffering) {
-                    if (BufferIndex + Count <= BUFFERSIZE) {
-                       memcpy(Buffer + BufferIndex, b, Count);
-                       BufferIndex += Count;
-                       }
-                    else
-                       esyslog("ERROR: too many bytes for frame type buffer (%d > %d) - dropped %d bytes", BufferIndex + Count, int(BUFFERSIZE), Count);
-                    }
-                 else if (FirstIframeSeen || frameDetector->IndependentFrame()) {
+                 if (FirstIframeSeen || frameDetector->IndependentFrame()) {
                     FirstIframeSeen = true; // start recording with the first I-frame
-                    if (frameDetector->IndependentFrame() && !NextFile()) // every file shall start with an independent frame
+                    if (!NextFile())
                        break;
                     if (index && frameDetector->NewFrame())
                        index->Write(frameDetector->IndependentFrame(), fileName->Number(), fileSize);
@@ -184,12 +154,6 @@ void cRecorder::Action(void)
                     fileSize += TS_SIZE;
                     }
                  }
-              if (BufferIndex) {
-                 recordFile->Write(Buffer, BufferIndex); // if an error occurs here, the next write below will catch and report it
-                 if (BufferIndex > MaxBufferIndex)
-                    MaxBufferIndex = BufferIndex;
-                 BufferIndex = 0;
-                 }
               if (recordFile->Write(b, Count) < 0) {
                  LOG_ERROR_STR(fileName->Name());
                  break;
@@ -207,8 +171,4 @@ void cRecorder::Action(void)
            t = time(NULL);
            }
         }
-  if (Buffer) {
-     free(Buffer);
-     dsyslog("frame type buffer used %d bytes", MaxBufferIndex);
-     }
 }
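
Note: the block removed above was the recorder's private frame-type buffer, which copied TS packets aside whenever the frame type was not in the first packet of a payload. After the revert the recorder instead relies on the detector's contract that Analyze() either consumes whole TS packets or consumes nothing and asks to be called again with more data (see the MIN_TS_PACKETS_FOR_FRAME_DETECTOR check in remux.c below), which is also why the ring buffer margin grows from one packet to two. The following is a standalone toy sketch of that contract, not VDR code; the Analyze() stand-in and its PayloadStart flag are invented for illustration only.

    #include <cstdio>

    const int TS_SIZE = 188;                          // one MPEG transport stream packet
    const int MIN_TS_PACKETS_FOR_FRAME_DETECTOR = 2;  // value introduced by this commit (remux.h)

    // Toy stand-in for cFrameDetector::Analyze(): at a payload start it refuses to
    // start scanning unless at least two packets are available, because the frame
    // type byte may only arrive in the following packet; otherwise it consumes
    // whole packets. The real Analyze() inspects TS headers instead of a flag.
    int Analyze(const unsigned char *Data, int Length, bool PayloadStart)
    {
      (void)Data;                                     // content is irrelevant for this sketch
      if (PayloadStart && Length < MIN_TS_PACKETS_FOR_FRAME_DETECTOR * TS_SIZE)
         return 0;                                    // "need more data" - caller keeps the bytes buffered
      return (Length / TS_SIZE) * TS_SIZE;            // whole packets only
    }

    int main(void)
    {
      unsigned char Buffer[3 * TS_SIZE] = { 0 };
      printf("one packet at a payload start  -> processed %d bytes\n", Analyze(Buffer, TS_SIZE, true));
      printf("three packets, same situation  -> processed %d bytes\n", Analyze(Buffer, 3 * TS_SIZE, true));
      return 0;
    }

A caller that removes from its ring buffer only what was reported as processed loses nothing while the detector waits for the second packet.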

recording.c: 15 changed lines

@@ -4,7 +4,7 @@
  * See the main source file 'vdr.c' for copyright information and
  * how to reach the author.
  *
- * $Id: recording.c 2.37 2011/08/27 10:55:53 kls Exp $
+ * $Id: recording.c 2.38 2011/09/04 09:32:25 kls Exp $
  */
 
 #include "recording.h"
@@ -1434,12 +1434,11 @@ void cIndexFileGenerator::Action(void)
   bool Rewind = false;
   cFileName FileName(recordingName, false);
   cUnbufferedFile *ReplayFile = FileName.Open();
-  cRingBufferLinear Buffer(IFG_BUFFER_SIZE, TS_SIZE);
+  cRingBufferLinear Buffer(IFG_BUFFER_SIZE, MIN_TS_PACKETS_FOR_FRAME_DETECTOR * TS_SIZE);
   cPatPmtParser PatPmtParser;
   cFrameDetector FrameDetector;
   cIndexFile IndexFile(recordingName, true);
   int BufferChunks = KILOBYTE(1); // no need to read a lot at the beginning when parsing PAT/PMT
-  int FileNumber = 0;
   off_t FileSize = 0;
   off_t FrameOffset = -1;
   Skins.QueueMessage(mtInfo, tr("Regenerating index file"));
@@ -1456,18 +1455,12 @@ void cIndexFileGenerator::Action(void)
         if (Data) {
            if (FrameDetector.Synced()) {
               // Step 3 - generate the index:
-              if (FrameOffset < 0 && TsPid(Data) == PATPID) {
-                 FileNumber = FileName.Number();
+              if (TsPid(Data) == PATPID)
                  FrameOffset = FileSize; // the PAT/PMT is at the beginning of an I-frame
-                 }
               int Processed = FrameDetector.Analyze(Data, Length);
               if (Processed > 0) {
-                 if (FrameDetector.NewPayload() && FrameOffset < 0) {
-                    FileNumber = FileName.Number();
-                    FrameOffset = FileSize;
-                    }
                  if (FrameDetector.NewFrame()) {
-                    IndexFile.Write(FrameDetector.IndependentFrame(), FileNumber, FrameOffset);
+                    IndexFile.Write(FrameDetector.IndependentFrame(), FileName.Number(), FrameOffset >= 0 ? FrameOffset : FileSize);
                     FrameOffset = -1;
                     }
                  FileSize += Processed;
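
Note: with NewPayload() gone, the index generator no longer latches a file number and offset at every payload start. It simply remembers the offset of each PAT packet (the retained comment notes that the PAT/PMT sits at the beginning of an I-frame in VDR recordings) and, when a new frame is reported, writes the index entry at that offset, falling back to the frame's own position when no PAT preceded it. The following standalone illustration of just that offset rule is not VDR code; the struct and its names are invented for this sketch.

    #include <cstdint>
    #include <cstdio>

    // Mirrors the FrameOffset handling in cIndexFileGenerator::Action() after this
    // commit: PAT offset remembered, used once, then reset to "unset" (-1).
    struct IndexOffsetRule {
      int64_t FrameOffset = -1;                 // offset of the last PAT packet seen, -1 if none

      void SawPat(int64_t FileSize) { FrameOffset = FileSize; }

      int64_t OffsetForNewFrame(int64_t FileSize) {
        int64_t Offset = FrameOffset >= 0 ? FrameOffset : FileSize;
        FrameOffset = -1;                       // consume the remembered PAT offset
        return Offset;
      }
    };

    int main(void)
    {
      IndexOffsetRule r;
      r.SawPat(100 * 188);                      // hypothetical byte offsets
      printf("I-frame entry points at %lld\n", (long long)r.OffsetForNewFrame(103 * 188));
      printf("next frame entry points at %lld\n", (long long)r.OffsetForNewFrame(150 * 188));
      return 0;
    }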

remux.c: 56 changed lines

@@ -4,7 +4,7 @@
  * See the main source file 'vdr.c' for copyright information and
  * how to reach the author.
  *
- * $Id: remux.c 2.60 2011/08/27 14:20:18 kls Exp $
+ * $Id: remux.c 2.61 2011/09/04 10:13:14 kls Exp $
  */
 
 #include "remux.h"
@@ -785,8 +785,7 @@ cFrameDetector::cFrameDetector(int Pid, int Type)
 {
   SetPid(Pid, Type);
   synced = false;
-  newPayload = newFrame = independentFrame = false;
-  frameTypeOffset = -1;
+  newFrame = independentFrame = false;
   numPtsValues = 0;
   numFrames = 0;
   numIFrames = 0;
@@ -813,8 +812,7 @@ void cFrameDetector::SetPid(int Pid, int Type)
 
 void cFrameDetector::Reset(void)
 {
-  newPayload = newFrame = independentFrame = false;
-  frameTypeOffset = -1;
+  newFrame = independentFrame = false;
   payloadUnitOfFrame = 0;
   scanning = false;
   scanner = EMPTY_SCANNER;
@@ -822,8 +820,9 @@ void cFrameDetector::Reset(void)
 
 int cFrameDetector::Analyze(const uchar *Data, int Length)
 {
+  int SeenPayloadStart = false;
   int Processed = 0;
-  newPayload = newFrame = independentFrame = false;
+  newFrame = independentFrame = false;
   while (Length >= TS_SIZE) {
         if (Data[0] != TS_SYNC_BYTE) {
            int Skipped = 1;
@@ -836,8 +835,11 @@ int cFrameDetector::Analyze(const uchar *Data, int Length)
         int Pid = TsPid(Data);
         if (Pid == pid) {
            if (TsPayloadStart(Data)) {
+              SeenPayloadStart = true;
              if (synced && Processed)
-                 return Processed; // flush everything before this new payload
+                 return Processed;
+              if (Length < MIN_TS_PACKETS_FOR_FRAME_DETECTOR * TS_SIZE)
+                 return Processed; // need more data, in case the frame type is not stored in the first TS packet
              if (framesPerSecond <= 0.0) {
                 // frame rate unknown, so collect a sequence of PTS values:
                 if (numPtsValues < 2 || numPtsValues < MaxPtsValues && numIFrames < 2) { // collect a sequence containing at least two I-frames
@@ -902,10 +904,6 @@ int cFrameDetector::Analyze(const uchar *Data, int Length)
           if (scanning) {
              int PayloadOffset = TsPayloadOffset(Data);
              if (TsPayloadStart(Data)) {
-                if (synced && Processed)
-                   return Processed; // flush everything before this new payload
-                newPayload = true;
-                scanner = EMPTY_SCANNER;
                 PayloadOffset += PesPayloadOffset(Data + PayloadOffset);
                 if (!framesPerPayloadUnit)
                    framesPerPayloadUnit = framesInPayloadUnit;
@@ -913,29 +911,17 @@ int cFrameDetector::Analyze(const uchar *Data, int Length)
                    dbgframes("/");
                 }
              for (int i = PayloadOffset; scanning && i < TS_SIZE; i++) {
-                 if (frameTypeOffset < 0) {
                  scanner <<= 8;
                  scanner |= Data[i];
-                    }
-                 else
-                    frameTypeOffset += PayloadOffset;
                  switch (type) {
                    case 0x01: // MPEG 1 video
                    case 0x02: // MPEG 2 video
                         if (scanner == 0x00000100) { // Picture Start Code
-                           if (frameTypeOffset < 0) {
-                              frameTypeOffset = i + 2;
-                              if (frameTypeOffset >= TS_SIZE) { // the byte to check is in the next TS packet
-                                 frameTypeOffset -= TS_SIZE;
-                                 if (!synced)
-                                    dbgframes("%d>", frameTypeOffset);
-                                 break;
-                                 }
-                              }
                            scanner = EMPTY_SCANNER;
+                           if (synced && !SeenPayloadStart && Processed)
+                              return Processed; // flush everything before this new frame
                            newFrame = true;
-                           uchar FrameType = (Data[frameTypeOffset] >> 3) & 0x07;
-                           frameTypeOffset = -1;
+                           uchar FrameType = (Data[i + 2] >> 3) & 0x07;
                            independentFrame = FrameType == 1; // I-Frame
                            if (synced) {
                               if (framesPerPayloadUnit <= 1)
@@ -955,30 +941,20 @@ int cFrameDetector::Analyze(const uchar *Data, int Length)
                            break;
                    case 0x1B: // MPEG 4 video
                         if (scanner == 0x00000109) { // Access Unit Delimiter
-                           if (frameTypeOffset < 0) {
-                              frameTypeOffset = i + 1;
-                              if (frameTypeOffset >= TS_SIZE) { // the byte to check is in the next TS packet
-                                 frameTypeOffset -= TS_SIZE;
-                                 if (!synced)
-                                    dbgframes("%d>", frameTypeOffset);
-                                 break;
-                                 }
-                              }
                            scanner = EMPTY_SCANNER;
+                           if (synced && !SeenPayloadStart && Processed)
+                              return Processed; // flush everything before this new frame
                            newFrame = true;
-                           uchar FrameType = Data[frameTypeOffset];
-                           frameTypeOffset = -1;
+                           uchar FrameType = Data[i + 1];
                            independentFrame = FrameType == 0x10;
                            if (synced) {
                               if (framesPerPayloadUnit < 0) {
                                  payloadUnitOfFrame = (payloadUnitOfFrame + 1) % -framesPerPayloadUnit;
                                  if (payloadUnitOfFrame != 0 && independentFrame)
                                     payloadUnitOfFrame = 0;
-                                 if (payloadUnitOfFrame) {
-                                    newPayload = false;
+                                 if (payloadUnitOfFrame)
                                     newFrame = false;
-                                    }
                                  }
                               if (framesPerPayloadUnit <= 1)
                                  scanning = false;
                               }
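
Note: the MPEG-1/2 branch goes back to reading the picture_coding_type directly from Data[i + 2]: once the 32-bit scanner equals the Picture Start Code, the type sits in bits 5..3 of the second picture header byte, and a value of 1 marks an I-frame. The new SeenPayloadStart flag only makes the "flush before this new frame" return conditional on whether the current chunk already began with a payload start. A self-contained sketch of that scan over a plain byte buffer follows; it is not VDR code, and the reset value and test bytes are made up for the example.

    #include <cstdint>
    #include <cstdio>
    #include <vector>

    // All-ones reset value so that leading zero bytes cannot look like a start code.
    static const uint32_t EMPTY_SCANNER = 0xFFFFFFFF;

    // Returns the MPEG-1/2 picture_coding_type (1 = I, 2 = P, 3 = B) of the first
    // picture header found in Data, or -1 if none is found or the type byte lies
    // beyond the end of the buffer (the case that makes the real detector wait
    // for the next TS packet).
    int FindPictureCodingType(const uint8_t *Data, int Length)
    {
      uint32_t Scanner = EMPTY_SCANNER;
      for (int i = 0; i < Length; i++) {
          Scanner = (Scanner << 8) | Data[i];
          if (Scanner == 0x00000100) {            // Picture Start Code
             if (i + 2 < Length)
                return (Data[i + 2] >> 3) & 0x07; // bits 5..3 of the second header byte
             return -1;
             }
          }
      return -1;
    }

    int main(void)
    {
      // Made-up payload: start code 00 00 01 00, then two picture header bytes whose
      // bits 5..3 of the second byte encode picture_coding_type = 1 (an I-frame).
      std::vector<uint8_t> Payload = { 0x00, 0x00, 0x01, 0x00, 0x00, 0x0F };
      printf("picture_coding_type: %d\n", FindPictureCodingType(Payload.data(), (int)Payload.size()));
      return 0;
    }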

remux.h: 11 changed lines

@@ -4,7 +4,7 @@
  * See the main source file 'vdr.c' for copyright information and
  * how to reach the author.
  *
- * $Id: remux.h 2.30 2011/06/12 12:49:17 kls Exp $
+ * $Id: remux.h 2.31 2011/09/04 09:09:33 kls Exp $
  */
 
 #ifndef __REMUX_H
@@ -336,16 +336,16 @@ void PesDump(const char *Name, const u_char *Data, int Length);
 
 // Frame detector:
 
+#define MIN_TS_PACKETS_FOR_FRAME_DETECTOR 2
+
 class cFrameDetector {
 private:
   enum { MaxPtsValues = 150 };
   int pid;
   int type;
   bool synced;
-  bool newPayload;
   bool newFrame;
   bool independentFrame;
-  int frameTypeOffset;
   uint32_t ptsValues[MaxPtsValues]; // 32 bit is enough - we only need the delta
   int numPtsValues;
   int numFrames;
@@ -377,11 +377,6 @@ public:
        ///< Analyze() needs to be called again with more actual data.
   bool Synced(void) { return synced; }
        ///< Returns true if the frame detector has synced on the data stream.
-  bool NewPayload(void) { return newPayload; }
-       ///< Returns true if the data given to the last call to Analyze() started a
-       ///< new payload. The caller should remember the current file offset in
-       ///< order to be able to generate an index entry later, when NewFrame()
-       ///< returns true.
   bool NewFrame(void) { return newFrame; }
        ///< Returns true if the data given to the last call to Analyze() started a
        ///< new frame.
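
Note: with NewPayload() and the frameTypeOffset bookkeeping removed, callers are back to driving the detector through Analyze(), Synced(), NewFrame() and IndependentFrame(), following the pattern visible in recording.c above. The sketch below assumes VDR's own headers from this tree (remux.h, ringbuffer.h); the function name and surrounding plumbing are invented here, so treat it as a shape to follow rather than a drop-in unit.

    // Sketch only: compiles inside the VDR source tree of this commit, where
    // cFrameDetector and cRingBufferLinear are declared in remux.h and ringbuffer.h.
    #include "remux.h"
    #include "ringbuffer.h"

    // Feed whatever the ring buffer currently holds to the frame detector and
    // react to each completed frame. The buffer is expected to have been created
    // with a margin of at least MIN_TS_PACKETS_FOR_FRAME_DETECTOR * TS_SIZE, so
    // Analyze() can see the packet after a payload start before naming the frame.
    void HandleFrames(cRingBufferLinear &Buffer, cFrameDetector &FrameDetector)
    {
      int Length;
      uchar *Data = Buffer.Get(Length);
      if (Data) {
         int Processed = FrameDetector.Analyze(Data, Length);
         if (Processed > 0) {
            if (FrameDetector.Synced() && FrameDetector.NewFrame()) {
               // write an index entry, switch files, etc.;
               // FrameDetector.IndependentFrame() says whether this is an I-frame
               }
            Buffer.Del(Processed);  // drop exactly what Analyze() consumed, keep the rest
            }
         }
    }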