Commit fbdd51ed by 景炳强

1.解决内存泄漏问题1.修改补帧方式 2.添加黑边自定义高度 3.修改音频卡顿

parent a07c84f7
CaptureThread::AddFrame::fps:: 0 10
ProcessThread::run::fps:: 0 7
CaptureThread::AddFrame::fps:: 2 11
ProcessThread::run::fps:: 2 6
ProcessThread::run::fps:: 3 6
CaptureThread::AddFrame::fps:: 3 11
CaptureThread::AddFrame::fps:: 1 14
ProcessThread::run::fps:: 1 0
CaptureThread::AddFrame::fps:: 0 20
CaptureThread::AddFrame::fps:: 2 19
ProcessThread::run::fps:: 0 15
ProcessThread::run::fps:: 3 15
ProcessThread::run::fps:: 2 15
CaptureThread::AddFrame::fps:: 3 19
CaptureThread::AddFrame::fps:: 1 19
......@@ -99,15 +99,15 @@
<ClCompile>
<TreatWChar_tAsBuiltInType>true</TreatWChar_tAsBuiltInType>
<MultiProcessorCompilation>true</MultiProcessorCompilation>
<DebugInformationFormat>None</DebugInformationFormat>
<Optimization>MaxSpeed</Optimization>
<DebugInformationFormat>ProgramDatabase</DebugInformationFormat>
<Optimization>Disabled</Optimization>
<RuntimeLibrary>MultiThreadedDLL</RuntimeLibrary>
<UsePrecompiledHeader>Use</UsePrecompiledHeader>
<PrecompiledHeaderFile>stdafx.h</PrecompiledHeaderFile>
</ClCompile>
<Link>
<SubSystem>Windows</SubSystem>
<GenerateDebugInformation>false</GenerateDebugInformation>
<GenerateDebugInformation>true</GenerateDebugInformation>
</Link>
<QtMoc>
<PrependInclude>stdafx.h;%(PrependInclude)</PrependInclude>
......
......@@ -92,6 +92,7 @@ private:
// Private methods
void scheduleVideoFramesFunc(void);
void scheduleAudioFramesFunc(void);
void scheduleAudioFramesFuncDeley(void);
void outputAudioFrameFunc(void);
bool waitForReferenceSignalToLock(void);
void checkEndOfPreroll(void);
......
......@@ -29,6 +29,7 @@ private:
{
ComPtr<IDeckLinkVideoInputFrame> video_frame;
qint64 timestamp;
qint64 sequenceNum = 0;
}videoFrameData;
static int s_count;
int idx;
......@@ -43,4 +44,5 @@ private:
std::queue<videoFrameData> taskVideoQueue;
std::condition_variable cv;
std::mutex mutex;
qint64 sequenceNum = 0;
};
\ No newline at end of file
......@@ -178,6 +178,7 @@ private:
void WorkCutImage(std::shared_ptr<Image>& pImage, RoiMessage& roi);
void WorkCutImage720p(std::shared_ptr<Image>& pImage, RoiMessage& roi);
void cutRun();
void cutRunFront();
private:
//SampleQueue<Image> taskPrerollQueue;
SampleQueue<std::shared_ptr<Image>> taskImageQueue;
......@@ -203,4 +204,7 @@ private:
int continuousLostNums;// NOTE(review): original comment was mojibake (GBK mis-decoded); appears to count consecutive frames without a received UDP ROI message — confirm
int minTaskImageQueueSize;
bool firstMinSize = false;
};
\ No newline at end of file
......@@ -11,7 +11,8 @@ public:
~AudioPacket();
public:
QByteArray audio_data;
//QByteArray audio_data;
qint8* buffer = NULL;
qint32 sample;
qint32 channel;
qint32 size;
......
......@@ -15,7 +15,7 @@ public:
Image();
Image(IDeckLinkVideoInputFrame* videoFrame);
Image(ComPtr<DeckLinkInputVideoFrame> videoFrame);
Image(ComPtr<IDeckLinkVideoInputFrame> videoFrame,qint64 curtimestamp);
Image(ComPtr<IDeckLinkVideoInputFrame> videoFrame,const qint64& curtimestamp,const qint64& sequence);
Image(ComPtr<IDeckLinkVideoInputFrame> videoFrame);
Image(const Image& other);
Image(Image&& other);
......@@ -54,6 +54,7 @@ public:
BMDTimeValue getOutputFrameCompletedReferenceTime() { return outputFrameCompletedReferenceTime; }
BMDOutputFrameCompletionResult getOutputCompletionResult() { return outputFrameCompletionResult; }
int64_t getInputFrameCurTimeStamp() { return inputFrameCurTimeStamp; }
int64_t getInputFrameSequence() { return sequenceNum; }
void GetMatByRoi(cv::Rect roi, cv::Mat& mat);
......@@ -80,6 +81,7 @@ private:
BMDTimeValue outputFrameCompletedReferenceTime;
qint64 inputFrameCurTimeStamp;//
qint64 sequenceNum;//
BMDOutputFrameCompletionResult outputFrameCompletionResult;
};
......
......@@ -77,7 +77,7 @@ HRESULT DeckLinkInputDevice::VideoInputFrameArrived(IDeckLinkVideoInputFrame* vi
// Since this application only previews, everything is driven from IDeckLinkScreenPreviewCallback::DrawFrame
ComPtr<IDeckLinkVideoInputFrame> frame = ComPtr<IDeckLinkVideoInputFrame>(videoFrame);
//emit ArrivedFrame(frame);
if (Capture) {
if (videoFrame && Capture) {
auto cur_time = QDateTime::currentMSecsSinceEpoch();
Capture->AddFrame(frame, cur_time);
}
......
......@@ -23,7 +23,7 @@ DeckLinkInputVideoFrame::DeckLinkInputVideoFrame(ComPtr<IDeckLinkVideoInputFrame
{
rowBytes = GetRowBytesFromPixelFormat(width, pixelFormat);
buffer.resize(height * rowBytes);
ComPtr<IDeckLinkVideoConversion> deckLinkVideoConversion = nullptr;
HRESULT result = CoCreateInstance(CLSID_CDeckLinkVideoConversion, nullptr, CLSCTX_ALL, IID_IDeckLinkVideoConversion, (void**)deckLinkVideoConversion.GetAddressOf());
if (!deckLinkVideoConversion)
......
......@@ -8,6 +8,7 @@ extern int OutputDeleyTime;
extern int OutputPlayMode;
#define OUTPUT_1080 1
#define AUDIOMAXSIZE (OutputDeleyTime + 3) * 50
DeckLinkOutputDevice::DeckLinkOutputDevice(ComPtr<IDeckLink>& decklink, int videoPrerollSize,int index)
: RefCount(1),
......@@ -153,27 +154,46 @@ bool DeckLinkOutputDevice::StartPlayback(BMDDisplayMode displayMode, bool enable
if((deckLinkOutput->DoesSupportVideoMode(bmdVideoConnectionUnspecified, outputDisplayMode, pixelFormat, bmdNoVideoOutputConversion, supportedVideoModeFlags, nullptr, &displayModeSupported) != S_OK) || !displayModeSupported)
{
qDebug() << "call DoesSupportVideoMode fuc is failure" << "\n";
return false;
}
if (deckLinkOutput->GetDisplayMode(outputDisplayMode, deckLinkDisplayMode.ReleaseAndGetAddressOf()) != S_OK)
{
qDebug() << "call GetDisplayMode fuc is failure" << "\n";
return false;
}
if (deckLinkDisplayMode->GetFrameRate(&frameDuration, &frameTimescale) != S_OK)
{
qDebug() << "call GetFrameRate fuc is failure" << "\n";
return false;
}
if (enable3D)
outputFlags = (BMDVideoOutputFlags)(outputFlags | bmdVideoOutputDualStream3D);
// Reference DeckLinkOutputDevice delegate callbacks
if (deckLinkOutput->SetScheduledFrameCompletionCallback(this) != S_OK)
{
qDebug() << "call SetScheduledFrameCompletionCallback fuc is failure" << "\n";
return false;
}
if (deckLinkOutput->EnableVideoOutput(outputDisplayMode, outputFlags) != S_OK)
{
qDebug() << "call EnableVideoOutput fuc is failure" << "\n";
return false;
}
if (deckLinkOutput->EnableAudioOutput(bmdAudioSampleRate48kHz,bmdAudioSampleType16bitInteger,2,bmdAudioOutputStreamTimestamped) != S_OK)
{
qDebug() << "call EnableAudioOutput fuc is failure" << "\n";
return false;
}
deckLinkOutput->SetScreenPreviewCallback(screenPreviewCallback);
......@@ -187,7 +207,7 @@ bool DeckLinkOutputDevice::StartPlayback(BMDDisplayMode displayMode, bool enable
outputAudioFrameQueue.Reset();
scheduleVideoFramesThread = std::thread(&DeckLinkOutputDevice::scheduleVideoFramesFunc, this);
scheduleAudioFramesThread = std::thread(&DeckLinkOutputDevice::scheduleAudioFramesFunc, this);
scheduleAudioFramesThread = std::thread(&DeckLinkOutputDevice::scheduleAudioFramesFuncDeley, this);
{
//std::lock_guard<std::mutex> locker(mutex);
state = PlaybackState::Prerolling;
......@@ -305,6 +325,7 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
void DeckLinkOutputDevice::scheduleVideoFramesFunc()
{
qDebug() << "start scheduleVideoFramesFunc function........" << "\n";
while(true)
{
START_WAIT_TIME_COUNTER
......@@ -315,7 +336,7 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
{
auto now_time = QDateTime::currentMSecsSinceEpoch();
auto dever_time = now_time - outputImage->getInputFrameCurTimeStamp();
qDebug() << "index:"<<Index << "input frame cur time:" << outputImage->getInputFrameCurTimeStamp() << " now time:" << now_time << " dever time:" << dever_time << "\n";
//qDebug() << "index:"<<Index << "input frame cur time:" << outputImage->getInputFrameCurTimeStamp() << " now time:" << now_time << " dever time:" << dever_time << "\n";
if (dever_time < OutputDeleyTime * 1000)
{
current_sleep_ms = OutputDeleyTime * 1000 - dever_time;
......@@ -341,6 +362,7 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
if(deckLinkOutput->DisplayVideoFrameSync(videoFrame.Get()))
{
fprintf(stderr, "Unable to schedule output video frame\n");
qDebug() << "Unable to schedule output video frame"<<"\n";
break;
}
......@@ -356,6 +378,44 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
}
}
void DeckLinkOutputDevice::scheduleAudioFramesFuncDeley(void)
{
while (true)
{
std::shared_ptr<AudioPacket> audio_packet;
if (outputAudioFrameQueue.WaitFor(audio_packet))
{
auto now_time = QDateTime::currentMSecsSinceEpoch();
auto data = audio_packet->buffer;
auto sample = audio_packet->sample;
auto audio_tm = audio_packet->frame_time_stamp;
auto dever_time = now_time - audio_tm;
//qDebug() << "index:" << Index << "input frame cur time:" << outputImage->getInputFrameCurTimeStamp() << " now time:" << now_time << " dever time:" << dever_time << "\n";
if (dever_time < OutputDeleyTime * 1000)
{
current_sleep_ms = OutputDeleyTime * 1000 - dever_time;
std::this_thread::sleep_for(std::chrono::milliseconds(current_sleep_ms));
}
//qDebug() << "index:" << Index << "send sdi audio timestamp:" << audio_tm << ",video timestamp:" << current_video_time << "\n";
quint32 sampleFramesWritten;
HRESULT ret = deckLinkOutput->WriteAudioSamplesSync(data, sample, &sampleFramesWritten);
if (ret == S_OK)
{
if (sampleFramesWritten < sample)
{
}
}
else
{
qDebug() << "Unable to schedule output audio frame" << "\n";
break;
}
}
}
}
void DeckLinkOutputDevice::scheduleAudioFramesFunc(void)
{
while (true)
......@@ -363,7 +423,7 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
std::shared_ptr<AudioPacket> audio_packet;
if (outputAudioFrameQueue.WaitFor(audio_packet))
{
auto data = audio_packet->audio_data.data();
auto data = audio_packet->buffer;
auto sample = audio_packet->sample;
auto audio_tm = audio_packet->frame_time_stamp;
qint32 duration = sample * 1000 / 48000;
......@@ -447,7 +507,12 @@ void DeckLinkOutputDevice::AddFrame(std::shared_ptr<Image> image)
return;
if (outputVideoFrameQueue.Size() >= 4)
qDebug() << "DeckLinkOutputDevice::AddFrame video---qsize:" << "\t" << outputVideoFrameQueue.Size() << "\n";
qDebug() << "index:" << Index << "DeckLinkOutputDevice::AddFrame video---qsize:" << "\t" << outputVideoFrameQueue.Size() << "\n";
if (outputVideoFrameQueue.Size() > AUDIOMAXSIZE)
{
outputVideoFrameQueue.Reset();
}
outputVideoFrameQueue.Push(image);
END_SLOT_TIME_COUNTER
......@@ -456,7 +521,12 @@ void DeckLinkOutputDevice::AddAudioFrame(std::shared_ptr<AudioPacket> audio_pack
{
START_SLOT_TIME_COUNTER
if (outputAudioFrameQueue.Size() >= 4)
qDebug() << "DeckLinkOutputDevice::AddAudioFrame audio---qsize:" << "\t" << outputAudioFrameQueue.Size() << "\n";
qDebug() << "index:" << Index << "DeckLinkOutputDevice::AddAudioFrame audio---qsize:" << "\t" << outputAudioFrameQueue.Size() << "\n";
if (outputAudioFrameQueue.Size() > AUDIOMAXSIZE)
{
outputAudioFrameQueue.Reset();
}
if (audio_packet)
{
outputAudioFrameQueue.Push(audio_packet);
......
......@@ -12,9 +12,11 @@
int OutputDeleyTime = 5;
int FrontDeleyTime = 1;
int FrameRate = 50;
int PrvwFlag = 0;
int OutputPlayMode = 0;
int BlackBottomHeight = 240;
qint64 StartTimeStamp = 0;
MomentaMedia::MomentaMedia(QWidget *parent)
: QMainWindow(parent)
......@@ -510,8 +512,17 @@ void MomentaMedia::ReadSettings()
settings.beginGroup("DELEYTIME");
OutputDeleyTime = settings.value("DELEY_TIME", "").toInt();
if (OutputDeleyTime < 1) OutputDeleyTime = 1;
else if (OutputDeleyTime > 20) OutputDeleyTime = 20;
FrontDeleyTime = settings.value("FRONT_DELEY_TIME", "").toInt();
if (FrontDeleyTime < 1 || FrontDeleyTime > OutputDeleyTime) FrontDeleyTime = 1;
FrameRate = settings.value("FRAME_RATE", "").toInt();
if (FrameRate < 25) FrameRate = 25;
BlackBottomHeight = settings.value("BLACK_BOTTOM_HEIGHT").toInt();
if (BlackBottomHeight < 0) BlackBottomHeight = 0;
else if (BlackBottomHeight > 480) BlackBottomHeight = 480;
PrvwFlag = settings.value("PRVW_FLAG", "").toInt();
OutputPlayMode = settings.value("OUTPUT_PLAY_MODE", "").toInt();
......
......@@ -96,7 +96,8 @@ void NDIOutputThread::run()
{
frame->Fill(Frame.p_data, Frame.xres * Frame.yres * 4);
//qDebug() << "wgt-----------------------------"<<"ndi(" << NDISenderName << ")timestamp : " << frame->getInputFrameCurTimeStamp() - StartTimeStamp << "\n";
Frame.timestamp = frame->getInputFrameCurTimeStamp();
//Frame.timestamp = frame->getInputFrameCurTimeStamp();
Frame.timestamp = frame->getInputFrameSequence();
NDIlib_send_send_video_v2(Instance, &Frame);
}
taskQueue.Pop(frame);
......
......@@ -37,12 +37,14 @@ void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame, const
int64_t st = QDateTime::currentMSecsSinceEpoch();
int64_t st2 = GetCurrTimeMS();
std::string tag = "CaptureThread::AddFrame::" + std::to_string(idx);
const char* tags = tag.c_str();
/*std::string tag = "CaptureThread::AddFrame::" + std::to_string(idx);
const char* tags = tag.c_str();*/
//PRINT_CURR_TIME(tags);
//qDebug() << "===> " << videoFrame->GetWidth() << " " << videoFrame->GetHeight() << "\n";
recvFrames++;
/*recvFrames++;
int elapse = (QDateTime::currentMSecsSinceEpoch() - recvStartTime) / 1000;
if (elapse >= 2) {
int fps = recvFrames / elapse;
......@@ -50,7 +52,7 @@ void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame, const
recvFrames = 0;
recvStartTime = QDateTime::currentMSecsSinceEpoch();
}
}*/
START_SLOT_TIME_COUNTER
if (videoFrame == nullptr)
......@@ -65,37 +67,37 @@ void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame, const
if (taskQueue.size() >= 4)
qDebug() << "CaptureThread::AddFrame::qsize::" << idx << "\t" << taskQueue.size() << "\n";
tag = "CaptureThread::AddFrame::doing::" + std::to_string(idx);
tags = tag.c_str();
/*tag = "CaptureThread::AddFrame::doing::" + std::to_string(idx);
tags = tag.c_str();*/
//PRINT_CURR_TIME(tags);
{
std::unique_lock<std::mutex> ulock(mutex);
tag = "CaptureThread::AddFrame::S0::" + std::to_string(idx);
tags = tag.c_str();
sequenceNum++;
/*tag = "CaptureThread::AddFrame::S0::" + std::to_string(idx);
tags = tag.c_str();*/
//PRINT_CURR_TIME(tags);
//taskQueue.push(videoFrame);
videoFrameData video_data = { videoFrame,timestamp };
videoFrameData video_data = { videoFrame,timestamp,sequenceNum };
taskVideoQueue.push(video_data);
tag = "CaptureThread::AddFrame::S1::" + std::to_string(idx);
tags = tag.c_str();
/*tag = "CaptureThread::AddFrame::S1::" + std::to_string(idx);
tags = tag.c_str();*/
//PRINT_CURR_TIME(tags);
cv.notify_all();
}
tag = "CaptureThread::AddFrame::End::" + std::to_string(idx);
tags = tag.c_str();
/*tag = "CaptureThread::AddFrame::End::" + std::to_string(idx);
tags = tag.c_str();*/
//PRINT_CURR_TIME(tags);
int64_t elaspe = QDateTime::currentMSecsSinceEpoch() - st;
/*int64_t elaspe = QDateTime::currentMSecsSinceEpoch() - st;
int64_t elaspe2 = GetCurrTimeMS() - st2;
if (elaspe >= 10)
qDebug() << "====> " << idx << "\t" << elaspe << " " << elaspe2 << "\n";
qDebug() << "====> " << idx << "\t" << elaspe << " " << elaspe2 << "\n";*/
END_SLOT_TIME_COUNTER
}
......@@ -109,6 +111,7 @@ void CaptureThread::run()
START_WAIT_TIME_COUNTER
ComPtr<IDeckLinkVideoInputFrame> videoFrame;
qint64 cur_time = 0;
qint64 sequence = 0;
/*if (taskQueue.WaitFor(videoFrame))
{
END_WAIT_TIME_COUNTER
......@@ -129,6 +132,7 @@ void CaptureThread::run()
auto& video_data = taskVideoQueue.front();
videoFrame = video_data.video_frame;
cur_time = video_data.timestamp;
sequence = video_data.sequenceNum;
taskVideoQueue.pop();
}
......@@ -136,7 +140,7 @@ void CaptureThread::run()
{
//auto cur_time = QDateTime::currentMSecsSinceEpoch();
//qDebug() << "input frame cur time:" << cur_time << "\n";
std::shared_ptr<Image> image = std::make_shared<Image>(videoFrame,cur_time);
std::shared_ptr<Image> image = std::make_shared<Image>(videoFrame,cur_time, sequence);
emit PushFrame(image);
}
......
......@@ -2,9 +2,13 @@
#include "opencv2/opencv.hpp"
#include "libyuv.h"
#define CUTBUFFERMAXSIZE 125*2
int ProcessThread::s_count = 0;
extern int OutputDeleyTime;
extern int FrameRate;
extern int FrontDeleyTime;
extern int BlackBottomHeight;
ProcessThread::ProcessThread()
: sendFrames(0),
......@@ -14,6 +18,7 @@ ProcessThread::ProcessThread()
recvROIStartTime(QDateTime::currentMSecsSinceEpoch())
{
idx = s_count++;
minTaskImageQueueSize = FrontDeleyTime * FrameRate;
}
ProcessThread::~ProcessThread()
......@@ -35,8 +40,14 @@ void ProcessThread::SetUpUDP(const QString hostAddr, const QString hostPort)
void ProcessThread::AddFrame(std::shared_ptr<Image> image)
{
    // Enqueue a captured frame for cropping. Invalid frames are dropped.
    // (The diff residue had the IsValid() check duplicated — old and new
    // lines interleaved; this is the single-check new version.)
    START_SLOT_TIME_COUNTER
    if (image->IsValid()) {
        taskImageQueue.Push(image);
        // Latch once the queue first reaches the configured front-delay depth
        // (minTaskImageQueueSize = FrontDeleyTime * FrameRate); consumers can
        // use firstMinSize to know the pre-buffer has filled at least once.
        if (taskImageQueue.Size() >= minTaskImageQueueSize && !firstMinSize)
        {
            firstMinSize = true;
        }
    }
    END_SLOT_TIME_COUNTER
}
......@@ -110,6 +121,155 @@ void ProcessThread::ReadDatagrams()
END_SLOT_TIME_COUNTER
}
void ProcessThread::cutRunFront()
{
    // Main cropping loop with a "front delay": frames are only processed once
    // at least FrontDeleyTime * FrameRate frames are buffered, giving ROI cut
    // rules (received over UDP, keyed by capture sequence number) time to
    // arrive before the matching frame is cut. For each frame it picks the
    // exact rule, interpolates between neighbouring rules when one was lost,
    // or falls back to the last known rule; then it bounds both the frame
    // backlog and the rule map.
    bool continue_flag = false;   // NOTE(review): never used in this function — confirm it can be removed
    int min_size = FrontDeleyTime * FrameRate;   // frames to pre-buffer before cutting starts
    while (true)
    {
        std::shared_ptr<Image> image = nullptr;
        if (taskImageQueue.WaitFor() && taskImageQueue.Size() >= min_size)   // landscape frame data is available
        //if (taskImageQueue.WaitFor() && firstMinSize)
        {
            // Drain every pending ROI message into the rule map first, so the
            // lookup below sees all rules received so far.
            while (taskROIQueue.Size())   // ROI messages are available
            {
                RoiMessage roi;
                taskROIQueue.Pop(roi);
                cutRuleMap[roi.Timecode()] = roi;   // Timecode() carries the frame sequence number
            }
            if (taskImageQueue.Front(image))
            {
                if (!image)
                {
                    // Discard null queue entries.
                    taskImageQueue.Pop();
                    continue;
                }
                auto timestamp = image->getInputFrameCurTimeStamp();
                auto sequence = image->getInputFrameSequence();
                //qDebug() << "idx:" << idx << " ,current raw data:" << sequence << "\n";
                if (cutRuleMap.size())
                {
                    auto itor = cutRuleMap.find(sequence);
                    if (itor != cutRuleMap.end())
                    {
                        // Exact rule for this frame's sequence number: apply it.
                        qDebug() << "idx:" << idx << "find rule,sequence: " << sequence << "\n";
                        lastReceiveMessage = itor->second;
                        WorkCutImage(image, lastReceiveMessage);
                        taskImageQueue.Pop();
                    }
                    else
                    {
                        if (sequence < cutRuleMap.begin()->first)
                        {
                            // Frame predates every known rule: fall back to the earliest one.
                            //qDebug() << "idx:" << idx << " sequence: " << sequence << " too small " << "\n";
                            lastReceiveMessage = cutRuleMap.begin()->second;
                            WorkCutImage(image, lastReceiveMessage);
                            taskImageQueue.Pop();
                        }
                        else
                        {
                            auto end = cutRuleMap.end(); end--;
                            if (end->second.Timecode() > sequence)
                            {
                                // The rule for this frame was lost but later rules exist:
                                // take the next rule after `sequence` and, when a previous
                                // rule is also available, average the two X positions so the
                                // crop window moves smoothly instead of jumping.
                                auto upper_itor = cutRuleMap.upper_bound(sequence);
                                qDebug() << "idx:" << idx << " sequence: " << sequence << " losted cut rule ..... and find upper sequence:"<<upper_itor->second.Timecode() << "\n";
                                auto roimsg = upper_itor->second;
                                if (upper_itor != cutRuleMap.begin())
                                {
                                    auto tmp_itor = upper_itor;
                                    --tmp_itor;
                                    auto roi_front = tmp_itor->second;
                                    int add = roimsg.X() + roi_front.X();
                                    lastReceiveMessage.SetX(add / 2);        // midpoint of the surrounding rules
                                    cutRuleMap[sequence] = lastReceiveMessage;   // cache the synthesized rule
                                }
                                else lastReceiveMessage = roimsg;
                                WorkCutImage(image, lastReceiveMessage);
                                taskImageQueue.Pop();
                                /*for (auto itor = cutRuleMap.begin(); itor != cutRuleMap.end(); itor++)
                                {
                                    if (itor->first > sequence)
                                    {
                                        auto roimsg = itor->second;
                                        if (itor != cutRuleMap.begin())
                                        {
                                            auto tmp_itor = itor;
                                            --tmp_itor;
                                            auto roi_front = tmp_itor->second;
                                            int add = roimsg.X() + roi_front.X();
                                            lastReceiveMessage.SetX(add / 2);
                                        }
                                        else lastReceiveMessage = roimsg;
                                        WorkCutImage(image, lastReceiveMessage);
                                        taskImageQueue.Pop();
                                        break;
                                    }
                                }*/
                                continue;
                            }
                            else
                            {
                                // No rule at or beyond this frame yet: reuse the last rule
                                // and clear the (now stale) map rather than stalling the pipeline.
                                //qDebug() << "idx:" << idx << " sequence: " << sequence << " wait cut rule..... " << "\n";
                                //if (!taskROIQueue.WaitUntil(200))
                                //{
                                //	//qDebug() << "idx:" << idx << " wait 200ms ,and not recv data!!!!!!!!!!!!" << endl;
                                //	WorkCutImage(image, lastReceiveMessage);
                                //	taskImageQueue.Pop();
                                //	cutRuleMap.clear();
                                //}
                                WorkCutImage(image, lastReceiveMessage);
                                taskImageQueue.Pop();
                                cutRuleMap.clear();
                                continue;
                            }
                        }
                    }
                }
                else
                {
                    // No rules at all: cut with whatever rule was used last.
                    WorkCutImage(image, lastReceiveMessage);
                    taskImageQueue.Pop();
                }
            }
        }
        /*else
        {
            qDebug() << "idx:" << idx << "taskImageQueue size:"<< taskImageQueue.Size() <<",min_size:"<< min_size << endl;
        }*/
        // Bound the backlog: drop oldest frames once the queue exceeds the total
        // delay budget (front + output delay) worth of frames.
        // NOTE(review): the logged "lost Image size" omits the FrameRate factor
        // used in the condition, so the printed count looks wrong — confirm.
        while (taskImageQueue.Size() > (FrameRate * (OutputDeleyTime + FrontDeleyTime)))
        {
            qDebug() << "idx:" << idx << ",----------------lost Image size:" << taskImageQueue.Size() - (OutputDeleyTime + FrontDeleyTime) << endl;
            taskImageQueue.Pop(image);
        }
        // Bound the rule map: once it exceeds CUTBUFFERMAXSIZE, drop rules older
        // than the most recently handled frame, but always keep at least 5.
        if (cutRuleMap.size() > CUTBUFFERMAXSIZE && image)
        {
            //qDebug() << " idx:" << idx << "rule map start clear data,more size:"<< cutRuleMap.size() - 125 << "\n";
            for (auto it = cutRuleMap.begin(); it != cutRuleMap.end();)
            {
                if (it->first < image->getInputFrameSequence() && cutRuleMap.size() > 5)
                {
                    cutRuleMap.erase(it++);
                }
                else
                {
                    ++it;
                }
            }
        }
    }
}
void ProcessThread::cutRun()
{
bool continue_flag = false;
......@@ -291,68 +451,8 @@ void ProcessThread::cutRun()
void ProcessThread::run()
{
    // Thread entry point: all processing happens in cutRunFront(), which
    // pre-buffers FrontDeleyTime * FrameRate frames and applies ROI-driven
    // cropping matched by capture sequence number.
    //
    // NOTE(review): the scraped diff showed both cutRun() and cutRunFront()
    // interleaved here; per the hunk header (-291,68 +451,8) the new body
    // dispatches to cutRunFront() only — the old timestamp-matching loop and
    // the cutRun() call were removed in this commit. Confirm against the
    // repository before relying on this reconstruction.
    cutRunFront();
}
void ProcessThread::WorkCutImage(std::shared_ptr<Image>& pImage, RoiMessage& roi)
......@@ -410,7 +510,7 @@ void ProcessThread::WorkCutImage(std::shared_ptr<Image>& pImage, RoiMessage& roi
uint8_t* buff3 = new uint8_t[1920 * 1080 << 2];
memset(buff3, 0, (1920 * 1080 << 2));
libyuv::ARGBCopy(buff2, 1440 << 2, (buff3 + 240 * 4), 1920 << 2, 1440, 1080);
libyuv::ARGBCopy(buff2, 1440 << 2, (buff3 + BlackBottomHeight * 4), 1920 << 2, 1440, 1080);
cv::Mat bgra = cv::Mat(1080, 1920, CV_8UC4, buff3);
pImage->SetMat(bgra);
......@@ -436,7 +536,7 @@ void ProcessThread::WorkCutImage(std::shared_ptr<Image>& pImage, RoiMessage& roi
//PRINT_CURR_TIME(tags);
//qDebug() << "ProcessThread image current time222:" << image->getInputFrameCurTimeStamp() << "\n";
emit PushFrame(pImage);
DEBUG_FUNCTION("taskImageQueue Size: ", taskImageQueue.Size())
//DEBUG_FUNCTION("taskImageQueue Size: ", taskImageQueue.Size())
sendFrames++;
int elapse = (QDateTime::currentMSecsSinceEpoch() - sendStartTime) / 1000;
......
......@@ -4,17 +4,21 @@ AudioPacket::AudioPacket(IDeckLinkAudioInputPacket* audioPacket, qint64& timesta
{
if (audioPacket)
{
sample = audioPacket->GetSampleFrameCount();
size = sample * sizeof(int16_t) * channel;
size = sample * sizeof(qint16) * channel;
void* data = nullptr;
audioPacket->GetBytes(&data);
if (data && size > 0) {
audio_data = QByteArray((char const*)data, size);
//audio_data = QByteArray((char const*)data, size);
buffer = new qint8[size];
memset(buffer,0,size);
memcpy(buffer, data, size);
}
}
}
AudioPacket::~AudioPacket()
{
    // buffer is allocated with `new qint8[size]` in the constructor, so it
    // must be released with delete[] — scalar delete on an array is undefined
    // behavior. delete[] on a null pointer is a safe no-op, so no guard needed.
    delete[] buffer;
}
\ No newline at end of file
......@@ -37,7 +37,7 @@ Image::Image(ComPtr<IDeckLinkVideoInputFrame> videoFrame)
ConvertDeckLinkVideoFrame2Mat(inVideoFrame, mat);
}
Image::Image(ComPtr<IDeckLinkVideoInputFrame> videoFrame, qint64 curtimestamp):inputFrameCurTimeStamp(curtimestamp)
Image::Image(ComPtr<IDeckLinkVideoInputFrame> videoFrame, const qint64& curtimestamp, const qint64& sequence):inputFrameCurTimeStamp(curtimestamp),sequenceNum(sequence)
{
if (videoFrame->GetWidth() != 1920 || videoFrame->GetHeight() != 1080)
{
......
;BLACK_TOP_HEIGHT 表示裁切后黑边距离顶部的高度 取值范围为0到480
;DELEY_TIME 后延迟(输出延迟)最少2s 而且后延迟要大于前延迟至少1s
;FRONT_DELEY_TIME 前延迟
;PRVW_FLAG 0-表示关闭预览 1-表示打开预览
;FRAME_RATE 表示帧率 如1080p50 50帧率 1080i50 25帧率
;OUTPUT_PLAY_MODE 输出sdi模式 0-表示和输入格式一样 1-表示1080i输出
[DELEYTIME]
DELEY_TIME=2
FRONT_DELEY_TIME=1
FRAME_RATE=25
PRVW_FLAG=0
OUTPUT_PLAY_MODE=0
\ No newline at end of file
OUTPUT_PLAY_MODE=0
BLACK_TOP_HEIGHT=0
This source diff could not be displayed because it is too large. You can view the blob instead.
;BLACK_BOTTOM_HEIGHT 表示裁切后黑边距离底部的高度 取值范围为0到480
;DELEY_TIME 后延迟(输出延迟)最少2s 而且后延迟要大于前延迟至少1s
;FRONT_DELEY_TIME 前延迟
;PRVW_FLAG 0-表示关闭预览 1-表示打开预览
;FRAME_RATE 表示帧率 如1080p50 50帧率 1080i50 25帧率
;OUTPUT_PLAY_MODE 输出sdi模式 0-表示和输入格式一样 1-表示1080i输出
[DELEYTIME]
DELEY_TIME=2
FRONT_DELEY_TIME=1
FRAME_RATE=25
PRVW_FLAG=1
PRVW_FLAG=0
OUTPUT_PLAY_MODE=0
BLACK_BOTTOM_HEIGHT=0
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment