Commit 46675251 by 景炳强

1.add 720p 2.add audio 3.sync frame 4.delay output

parent 8306c261
CaptureThread::AddFrame::fps:: 0 10
ProcessThread::run::fps:: 0 7
CaptureThread::AddFrame::fps:: 2 11
ProcessThread::run::fps:: 2 6
ProcessThread::run::fps:: 3 6
CaptureThread::AddFrame::fps:: 3 11
CaptureThread::AddFrame::fps:: 1 14
ProcessThread::run::fps:: 1 0
CaptureThread::AddFrame::fps:: 0 20
CaptureThread::AddFrame::fps:: 2 19
ProcessThread::run::fps:: 0 15
ProcessThread::run::fps:: 3 15
ProcessThread::run::fps:: 2 15
CaptureThread::AddFrame::fps:: 3 19
CaptureThread::AddFrame::fps:: 1 19
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -131,6 +131,7 @@
<ClInclude Include="include\BlackMagicDesign\ReferenceTime.h" />
<QtMoc Include="include\Threads\ProcessThread.h" />
<ClInclude Include="include\Orbit.h" />
<QtMoc Include="include\Utils\AudioPacket.h" />
<ClInclude Include="include\Utils\Common.h" />
<ClInclude Include="include\Utils\DispatchQueue.h" />
<ClInclude Include="include\Utils\LatencyStatistics.h" />
......@@ -174,6 +175,7 @@
</ClCompile>
<ClCompile Include="src\Threads\CaptureThread.cpp" />
<ClCompile Include="src\Threads\ProcessThread.cpp" />
<ClCompile Include="src\Utils\AudioPacket.cpp" />
<ClCompile Include="src\Utils\Common.cpp" />
<ClCompile Include="src\Utils\Image.cpp" />
<ClCompile Include="src\Utils\LatencyStatistics.cpp" />
......
......@@ -89,6 +89,9 @@
<QtMoc Include="include\Threads\ProcessThread.h">
<Filter>Header Files\Threads</Filter>
</QtMoc>
<QtMoc Include="include\Utils\AudioPacket.h">
<Filter>Header Files\Utils</Filter>
</QtMoc>
</ItemGroup>
<ItemGroup>
<ClInclude Include="include\stdafx.h">
......@@ -223,6 +226,9 @@
<ClCompile Include="src\Threads\ProcessThread.cpp">
<Filter>Source Files\Threads</Filter>
</ClCompile>
<ClCompile Include="src\Utils\AudioPacket.cpp">
<Filter>Source Files\Utils</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<QtUic Include="Form\MomentaMedia.ui">
......
......@@ -13,6 +13,7 @@
#include "Utils/CustomEvents.h"
#include "Utils/ComPtr.h"
#include "Utils/Image.h"
#include "Utils/AudioPacket.h"
#include "Threads/CaptureThread.h"
class DeckLinkInputDevice : public QObject, public IDeckLinkInputCallback
......@@ -59,7 +60,7 @@ public:
signals:
void ArrivedFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame);
void PushAudioFrame(std::shared_ptr<AudioPacket> image);
private:
std::atomic<ULONG> RefCount;
QObject* Owner;
......
......@@ -17,6 +17,7 @@
#include "Utils/Platform.h"
#include "DeckLinkOutputVideoFrame.h"
#include "Utils/Image.h"
#include "Utils/AudioPacket.h"
class DeckLinkOutputDevice : public QObject, public IDeckLinkVideoOutputCallback
{
......@@ -56,6 +57,7 @@ public:
ComPtr<IDeckLinkOutput> getDeckLinkOutput(void) const { return deckLinkOutput; }
public slots:
void AddAudioFrame(std::shared_ptr<AudioPacket> audio_packet);
void AddFrame(std::shared_ptr<Image> image);
private:
......@@ -67,6 +69,7 @@ private:
BMDVideoConnection SelectedOutputConnection;
//
SampleQueue<std::shared_ptr<Image>> outputVideoFrameQueue;
SampleQueue<std::shared_ptr<AudioPacket>> outputAudioFrameQueue;
//ScheduledFramesList scheduledFramesList;
//
uint32_t videoPrerollSize;
......@@ -76,16 +79,20 @@ private:
//
bool seenFirstVideoFrame;
BMDTimeValue startPlaybackTime;
bool first_sleep;
//
//std::mutex mutex;
//std::condition_variable playbackStoppedCondition;
//
std::thread scheduleVideoFramesThread;
std::thread scheduleAudioFramesThread;
//
ScheduledFrameCompletedCallback scheduledFrameCompletedCallback;
// Private methods
void scheduleVideoFramesFunc(void);
void scheduleAudioFramesFunc(void);
void outputAudioFrameFunc(void);
bool waitForReferenceSignalToLock(void);
void checkEndOfPreroll(void);
};
......@@ -19,12 +19,17 @@ public:
~CaptureThread();
public slots:
void AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame);
void AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame,const qint64& timestamp);
signals:
void PushFrame(std::shared_ptr<Image> image);
private:
typedef struct videoFrameData
{
ComPtr<IDeckLinkVideoInputFrame> video_frame;
qint64 timestamp;
}videoFrameData;
static int s_count;
int idx;
//SampleQueue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue;
......@@ -35,6 +40,7 @@ private:
int64_t recvStartTime;
std::queue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue;
std::queue<videoFrameData> taskVideoQueue;
std::condition_variable cv;
std::mutex mutex;
};
\ No newline at end of file
......@@ -5,6 +5,7 @@
#include <QWaitCondition>
#include <QUdpSocket>
#include <QJsonDocument>
#include <map>
#include "Utils/SampleQueue.h"
#include "Utils/Image.h"
......@@ -14,20 +15,36 @@ const QString MODE_CROP = "crop_roi";
const QString MODE_STOP = "stop";
const QString MODE_ACK = "checked_ok";
#define CROP1080WIDTH 810
#define CROP720WIDTH 720
#define CROPHEIGHT 1080
#define USE_1080P 1
class RoiMessage
{
public:
RoiMessage() : w(800), h(1080)
#if USE_1080P
RoiMessage() : w(CROP1080WIDTH), h(CROPHEIGHT)
{
x = 1920 / 2 - w / 2;
y = 1080 / 2 - h / 2;
timecode = 0;
}
#else
RoiMessage() : w(CROP720WIDTH), h(CROPHEIGHT)
{
x = 1920 / 2 - w / 2;
y = 1080 / 2 - h / 2;
timecode = 0;
}
#endif
RoiMessage(QByteArray& data)
{
QJsonDocument document = QJsonDocument::fromJson(data);
QJsonObject object;
if(document.isObject())
if (document.isObject())
{
object = document.object();
mode = object.value("signal").toString();
......@@ -40,6 +57,7 @@ public:
width = object.value("width").toInt();
height = object.value("height").toInt();
timecode = QString::number(object.value("timecode").toDouble(), 'f', 0).toLongLong();
w = maxx - minx;
h = maxy - miny;
......@@ -65,6 +83,7 @@ public:
width = object.value("width").toInt();
height = object.value("height").toInt();
timecode = QString::number(object.value("timecode").toDouble(), 'f', 0).toLongLong();
w = maxx - minx;
h = maxy - miny;
......@@ -73,7 +92,7 @@ public:
}
}
RoiMessage(const RoiMessage& other) : x(other.x), y(other.y), w(other.w), h(other.h)
RoiMessage(const RoiMessage& other) : x(other.x), y(other.y), w(other.w), h(other.h), timecode(other.timecode)
{
}
......@@ -84,6 +103,7 @@ public:
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
}
RoiMessage operator=(const RoiMessage& other)
......@@ -92,6 +112,7 @@ public:
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
return *this;
}
......@@ -101,6 +122,7 @@ public:
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
return *this;
}
......@@ -109,10 +131,16 @@ public:
return x > 0 && y > 0 && w > 0 && h > 0;
}
void SetX(int x_)
{
this->x = x_;
}
int X() { return x; }
int Y() { return y; }
int Width() { return w; }
int Height() { return h; }
qint64 Timecode() { return timecode; }
private:
int x;
......@@ -121,6 +149,7 @@ private:
int h;
QString mode;
qint64 timecode;
int id = 0;
int width;
int height;
......@@ -138,17 +167,22 @@ public:
public slots:
void AddFrame(std::shared_ptr<Image> image);
void ReadDatagrams();
void ReadDatagramsNew();
signals:
void PushFrame(std::shared_ptr<Image> image);
protected:
void run() override;
private:
void WorkCutImage(std::shared_ptr<Image>& pImage, RoiMessage& roi);
void WorkCutImage720p(std::shared_ptr<Image>& pImage, RoiMessage& roi);
void cutRun();
private:
//SampleQueue<Image> taskPrerollQueue;
SampleQueue<std::shared_ptr<Image>> taskImageQueue;
SampleQueue<RoiMessage> taskROIQueue;
std::map<qint64, RoiMessage> cutRuleMap;
QUdpSocket* udpSocket;
......@@ -166,4 +200,7 @@ private:
int recvROIs;
int64_t recvROIStartTime;
int continuousLostNums;// consecutive frames with no UDP ROI message received (original comment was mojibake — confirm intent)
};
\ No newline at end of file
#pragma once
#include <QtCore/QtCore>
#include <QMetaType>
#include "DeckLinkAPI.h"
// Owning snapshot of one DeckLink audio input packet plus its capture
// timestamp, so the samples can be queued and written to the output device
// later (an IDeckLinkAudioInputPacket is only valid inside the capture
// callback, hence the copy into a QByteArray).
// NOTE(review): deep-copy of the packet's sample buffer is assumed from the
// QByteArray member — the constructor definition is not visible here; confirm.
class AudioPacket : public QObject
{
Q_OBJECT
public:
// audioPacket: packet delivered by the DeckLink input callback.
// timestamp:   arrival time in ms (QDateTime::currentMSecsSinceEpoch at capture).
AudioPacket(IDeckLinkAudioInputPacket* audioPacket,qint64& timestamp);
~AudioPacket();
public:
QByteArray audio_data;     // raw interleaved PCM copied out of the packet
qint32 sample;             // sample-frame count — TODO confirm (frames vs per-channel samples)
qint32 channel;            // channel count (capture enables 2 channels)
qint32 size;               // byte size of audio_data
qint64 frame_time_stamp;   // capture timestamp, ms since epoch
};
......@@ -20,8 +20,12 @@ public:
void Push(const T& sample);
void Push(T&& sample);
bool Pop(T& sample);
bool Pop();
bool Front(T& sample);
bool WaitFor(T& sample);
bool WaitFor();
bool WaitUntil(T& sample, int timeout);
bool WaitUntil(int timeout);
void CancelWaiters(void);
void Reset(void);
......@@ -104,6 +108,28 @@ bool SampleQueue<T>::Pop(T& sample)
}
template <typename T>
bool SampleQueue<T>::Pop()
{
	// Discard the oldest queued sample without retrieving it.
	// Returns false when there was nothing to drop.
	std::lock_guard<std::mutex> guard(mutex);
	if (!queue.empty())
	{
		queue.pop();
		return true;
	}
	return false;
}
template <typename T>
bool SampleQueue<T>::Front(T& sample)
{
	// Copy — do not remove — the oldest queued sample into `sample`.
	// A copy (rather than a move) is deliberate: the element stays in the
	// queue and must remain intact for the eventual Pop().
	std::lock_guard<std::mutex> guard(mutex);
	if (!queue.empty())
	{
		sample = queue.front();
		return true;
	}
	return false;
}
template <typename T>
bool SampleQueue<T>::WaitFor(T& sample)
{
// Blocking wait for sample
......@@ -121,6 +147,38 @@ bool SampleQueue<T>::WaitFor(T& sample)
}
template <typename T>
bool SampleQueue<T>::WaitFor()
{
	// Block until at least one sample is queued, without consuming it.
	// Returns false only when CancelWaiters() woke us; cancellation wins
	// even if data happens to be available at the same moment (matching
	// the original's check order: waitCancelled is tested first).
	std::unique_lock<std::mutex> locker(mutex);
	queueCondition.wait(locker, [this] { return waitCancelled || !queue.empty(); });
	return !waitCancelled;
}
template <typename T>
bool SampleQueue<T>::WaitUntil(int timeout)
{
	// Block until a sample is queued, waiters are cancelled, or `timeout`
	// milliseconds elapse; the sample (if any) is NOT consumed.
	// Returns true only when a sample is available and we were not cancelled
	// (identical to the original's observable behavior).
	//
	// Fix: the original built a deadline from std::chrono::system_clock and
	// used wait_until — the system clock can jump (NTP/DST adjustment),
	// making the timed wait fire far too early or too late. wait_for measures
	// the relative timeout against a steady clock instead.
	std::unique_lock<std::mutex> locker(mutex);
	queueCondition.wait_for(locker, std::chrono::milliseconds(timeout),
		[this] { return waitCancelled || !queue.empty(); });
	return !waitCancelled && !queue.empty();
}
template <typename T>
bool SampleQueue<T>::WaitUntil(T& sample, int timeout)
{
// Blocking wait for sample
......
......@@ -78,7 +78,15 @@ HRESULT DeckLinkInputDevice::VideoInputFrameArrived(IDeckLinkVideoInputFrame* vi
ComPtr<IDeckLinkVideoInputFrame> frame = ComPtr<IDeckLinkVideoInputFrame>(videoFrame);
//emit ArrivedFrame(frame);
if (Capture) {
Capture->AddFrame(frame);
auto cur_time = QDateTime::currentMSecsSinceEpoch();
Capture->AddFrame(frame, cur_time);
}
if(audioPacket)
{
//qDebug() << "DeckLinkInputDevice get audio packet--------------" << "\n";
auto cur_time = QDateTime::currentMSecsSinceEpoch();
std::shared_ptr<AudioPacket> audio_ptr = std::make_shared<AudioPacket>(audioPacket, cur_time);
emit PushAudioFrame(audio_ptr);
}
/*std::unique_lock<std::mutex> ulock(mutex);
......@@ -197,6 +205,7 @@ bool DeckLinkInputDevice::Initialize()
if (deckLinkAttributes->GetInt(BMDDeckLinkVideoInputConnections, &SupportedInputConnections) != S_OK)
SupportedInputConnections = 0;
return true;
}
......@@ -215,11 +224,14 @@ bool DeckLinkInputDevice::StartCapture(BMDDisplayMode displayMode, IDeckLinkScre
// Set capture callback
DeckLinkInput->SetCallback(this);
// Set the video input mode
if (DeckLinkInput->EnableVideoInput(displayMode, bmdFormat10BitYUV, videoInputFlags) != S_OK)
return false;
if (DeckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, bmdAudioSampleType16bitInteger, 2) != S_OK)
return false;
// Set the capture
if (DeckLinkInput->StartStreams() != S_OK)
return false;
......
......@@ -10,6 +10,8 @@
/// DeckLinkOpenGLOverlay
///
extern int PrvwFlag;
DeckLinkOpenGLOverlayWidget::DeckLinkOpenGLOverlayWidget(QWidget* parent) : QWidget(parent)
{
Delegate = new DeckLinkPreviewOverlay(this);
......@@ -35,7 +37,7 @@ DeckLinkOpenGLWidget::DeckLinkOpenGLWidget(QWidget* parent) : QOpenGLWidget(pare
GetDeckLinkOpenGLScreenPreviewHelper(DeckLinkScreenPreviewHelper);
Delegate = MakeComPtr<ScreenPreviewCallback>();
connect(Delegate.Get(), &ScreenPreviewCallback::FrameArrived, this, &DeckLinkOpenGLWidget::SetFrame, Qt::QueuedConnection);
if(PrvwFlag) connect(Delegate.Get(), &ScreenPreviewCallback::FrameArrived, this, &DeckLinkOpenGLWidget::SetFrame, Qt::QueuedConnection);
OverlayWidget = new DeckLinkOpenGLOverlayWidget(this);
}
......
......@@ -6,6 +6,8 @@
extern int OutputDeleyTime;
#define OUTPUT_1080 1
DeckLinkOutputDevice::DeckLinkOutputDevice(ComPtr<IDeckLink>& decklink, int videoPrerollSize)
: RefCount(1),
state(PlaybackState::Idle),
......@@ -14,7 +16,8 @@ DeckLinkOutputDevice::DeckLinkOutputDevice(ComPtr<IDeckLink>& decklink, int vide
videoPrerollSize(1),
seenFirstVideoFrame(false),
startPlaybackTime(0),
scheduledFrameCompletedCallback(nullptr)
scheduledFrameCompletedCallback(nullptr),
first_sleep(false)
{
// Check that device has an output interface, this will throw an error if using a capture-only device such as DeckLink Mini Recorder
if (!deckLinkOutput)
......@@ -116,6 +119,12 @@ HRESULT DeckLinkOutputDevice::ScheduledPlaybackHasStopped()
bool DeckLinkOutputDevice::StartPlayback(BMDDisplayMode displayMode, bool enable3D, BMDPixelFormat pixelFormat, bool requireReferenceLocked, IDeckLinkScreenPreviewCallback* screenPreviewCallback)
{
BMDDisplayMode outputDisplayMode;
#if OUTPUT_1080
outputDisplayMode = displayMode;
#else
outputDisplayMode = BMDDisplayMode::bmdModeHD720p50;
#endif
// Pass through RP188 timecode and VANC from input frame. VITC timecode is forwarded with VANC
BMDVideoOutputFlags outputFlags = (BMDVideoOutputFlags)(bmdVideoOutputRP188 | bmdVideoOutputVANC);
ComPtr<IDeckLinkDisplayMode> deckLinkDisplayMode;
......@@ -130,12 +139,12 @@ bool DeckLinkOutputDevice::StartPlayback(BMDDisplayMode displayMode, bool enable
state = PlaybackState::Starting;
}
if((deckLinkOutput->DoesSupportVideoMode(bmdVideoConnectionUnspecified, displayMode, pixelFormat, bmdNoVideoOutputConversion, supportedVideoModeFlags, nullptr, &displayModeSupported) != S_OK) || !displayModeSupported)
if((deckLinkOutput->DoesSupportVideoMode(bmdVideoConnectionUnspecified, outputDisplayMode, pixelFormat, bmdNoVideoOutputConversion, supportedVideoModeFlags, nullptr, &displayModeSupported) != S_OK) || !displayModeSupported)
{
return false;
}
if (deckLinkOutput->GetDisplayMode(displayMode, deckLinkDisplayMode.ReleaseAndGetAddressOf()) != S_OK)
if (deckLinkOutput->GetDisplayMode(outputDisplayMode, deckLinkDisplayMode.ReleaseAndGetAddressOf()) != S_OK)
return false;
if (deckLinkDisplayMode->GetFrameRate(&frameDuration, &frameTimescale) != S_OK)
......@@ -148,7 +157,10 @@ bool DeckLinkOutputDevice::StartPlayback(BMDDisplayMode displayMode, bool enable
if (deckLinkOutput->SetScheduledFrameCompletionCallback(this) != S_OK)
return false;
if (deckLinkOutput->EnableVideoOutput(displayMode, outputFlags) != S_OK)
if (deckLinkOutput->EnableVideoOutput(outputDisplayMode, outputFlags) != S_OK)
return false;
if (deckLinkOutput->EnableAudioOutput(bmdAudioSampleRate48kHz,bmdAudioSampleType16bitInteger,2,bmdAudioOutputStreamTimestamped) != S_OK)
return false;
deckLinkOutput->SetScreenPreviewCallback(screenPreviewCallback);
......@@ -160,9 +172,10 @@ bool DeckLinkOutputDevice::StartPlayback(BMDDisplayMode displayMode, bool enable
}
outputVideoFrameQueue.Reset();
outputAudioFrameQueue.Reset();
scheduleVideoFramesThread = std::thread(&DeckLinkOutputDevice::scheduleVideoFramesFunc, this);
scheduleAudioFramesThread = std::thread(&DeckLinkOutputDevice::scheduleAudioFramesFunc, this);
{
//std::lock_guard<std::mutex> locker(mutex);
state = PlaybackState::Prerolling;
......@@ -193,9 +206,13 @@ void DeckLinkOutputDevice::StopPlayback()
}
outputVideoFrameQueue.CancelWaiters();
outputAudioFrameQueue.CancelWaiters();
if (scheduleVideoFramesThread.joinable())
scheduleVideoFramesThread.join();
if (scheduleAudioFramesThread.joinable())
scheduleAudioFramesThread.join();
}
// In scheduled playback is running, stop video and audio streams immediately
......@@ -211,6 +228,8 @@ void DeckLinkOutputDevice::StopPlayback()
// Disable video
deckLinkOutput->DisableVideoOutput();
// Disable audio
deckLinkOutput->DisableAudioOutput();
// Dereference DeckLinkOutputDevice delegate from callbacks
deckLinkOutput->SetScheduledFrameCompletionCallback(nullptr);
......@@ -264,6 +283,14 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
return state != PlaybackState::Idle;
}
// Unimplemented stub for a dedicated audio-output loop.
// WARNING(review): if this is ever run on a thread it busy-spins forever —
// an empty while(true) with no wait and no exit condition burns 100% of a
// core and can never be joined. Audio output is currently performed by
// scheduleAudioFramesFunc() instead; either give this a body with a
// blocking wait or remove the declaration.
void DeckLinkOutputDevice::outputAudioFrameFunc(void)
{
while (true)
{
}
}
void DeckLinkOutputDevice::scheduleVideoFramesFunc()
{
while(true)
......@@ -280,8 +307,8 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
qDebug() << "input frame cur time:" << input_time << " now time:" <<now_time << " dever time:"<<dever_time << "\n";
if (dever_time < OutputDeleyTime * 1000)
{
//qDebug() << "sleep ms:" << 5 * 1000 - dever_time << "\n";
Sleep(OutputDeleyTime * 1000 - dever_time);
std::this_thread::sleep_for(std::chrono::milliseconds(OutputDeleyTime * 1000 - dever_time));
}
END_WAIT_TIME_COUNTER
//std::lock_guard<std::mutex> locker(mutex);
......@@ -305,6 +332,7 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
break;
}
//scheduledFramesList.push_back(outputFrame);
//checkEndOfPreroll();
......@@ -316,6 +344,33 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
}
}
void DeckLinkOutputDevice::scheduleAudioFramesFunc(void)
{
while (true)
{
std::shared_ptr<AudioPacket> audio_packet;
if (outputAudioFrameQueue.WaitFor(audio_packet))
{
auto data = audio_packet->audio_data.data();
auto sample = audio_packet->sample;
quint32 sampleFramesWritten;
HRESULT ret = deckLinkOutput->WriteAudioSamplesSync(data,sample,&sampleFramesWritten);
if(ret == S_OK)
{
if (sampleFramesWritten < sample)
{
}
}
else
{
qDebug() << "Unable to schedule output audio frame" << "\n";
break;
}
}
}
}
bool DeckLinkOutputDevice::waitForReferenceSignalToLock()
{
ComPtr<IDeckLinkStatus> deckLinkStatus(IID_IDeckLinkStatus, deckLink);
......@@ -359,8 +414,19 @@ void DeckLinkOutputDevice::AddFrame(std::shared_ptr<Image> image)
return;
if (outputVideoFrameQueue.Size() >= 4)
qDebug() << "DeckLinkOutputDevice::AddFrame::qsize::" << "\t" << outputVideoFrameQueue.Size() << "\n";
qDebug() << "DeckLinkOutputDevice::AddFrame video---qsize:" << "\t" << outputVideoFrameQueue.Size() << "\n";
outputVideoFrameQueue.Push(image);
END_SLOT_TIME_COUNTER
}
// Qt slot: receives a captured audio packet (connected from
// DeckLinkInputDevice::PushAudioFrame) and queues it for the audio
// playback thread (scheduleAudioFramesFunc).
// Null packets are silently dropped. A queue depth >= 4 is only logged,
// not throttled — the queue is unbounded here.
void DeckLinkOutputDevice::AddAudioFrame(std::shared_ptr<AudioPacket> audio_packet)
{
START_SLOT_TIME_COUNTER
// Diagnostic only: warn when the consumer thread is falling behind.
if (outputAudioFrameQueue.Size() >= 4)
qDebug() << "DeckLinkOutputDevice::AddAudioFrame audio---qsize:" << "\t" << outputAudioFrameQueue.Size() << "\n";
if (audio_packet)
{
outputAudioFrameQueue.Push(audio_packet);
}
END_SLOT_TIME_COUNTER
}
......@@ -252,6 +252,7 @@ void DeckLinkOutputPage::RequestedDeviceGranted(ComPtr<IDeckLink>& device)
// TODO: Connect
connect(BindingInputPage->GetCapture(), &CaptureThread::PushFrame, Process.get(), &ProcessThread::AddFrame);
connect(Process.get(), &ProcessThread::PushFrame, SelectedDevice.Get(), &DeckLinkOutputDevice::AddFrame);
connect(BindingInputPage->GetSelectedDevice().Get(), &DeckLinkInputDevice::PushAudioFrame, SelectedDevice.Get(), &DeckLinkOutputDevice::AddAudioFrame);
connect(BindingInputPage, &DeckLinkInputPage::FormatChanged, this, &DeckLinkOutputPage::FormatChanged);
......
......@@ -12,15 +12,18 @@
int OutputDeleyTime = 5;
int FrameRate = 50;
int PrvwFlag = 0;
qint64 StartTimeStamp = 0;
MomentaMedia::MomentaMedia(QWidget *parent)
: QMainWindow(parent)
{
ReadSettings();
ui.setupUi(this);
StartTimeStamp = QDateTime::currentMSecsSinceEpoch();
setWindowFlags(Qt::Window | Qt::WindowMinimizeButtonHint | Qt::WindowMaximizeButtonHint | Qt::WindowCloseButtonHint);
InputDevicePages[0] = ui.devicePage1;
InputDevicePages[1] = ui.devicePage2;
InputDevicePages[2] = ui.devicePage3;
......@@ -41,7 +44,6 @@ MomentaMedia::MomentaMedia(QWidget *parent)
DeleyTimeEdit->setValidator(intValidator);
connect(DeleyTimeEdit,&QLineEdit::textChanged,this,&MomentaMedia::DeleyTimeTextChanged);
ReadSettings();
QSize previewViewSize = ui.previewContainer->size();
......@@ -503,6 +505,10 @@ void MomentaMedia::ReadSettings()
QSettings settings(ini_path, QSettings::IniFormat);
settings.beginGroup("DELEYTIME");
OutputDeleyTime = settings.value("DELEY_TIME", "").toInt();
if (OutputDeleyTime < 1) OutputDeleyTime = 1;
FrameRate = settings.value("FRAME_RATE", "").toInt();
if (FrameRate < 25) FrameRate = 25;
PrvwFlag = settings.value("PRVW_FLAG", "").toInt();
//qDebug() << "deleyTime=" << deleyTime << endl;
settings.endGroup();
}
\ No newline at end of file
......@@ -7,6 +7,8 @@
#include <atomic>
#include "NDI/NDIOutputThread.h"
extern qint64 StartTimeStamp;
NDIOutputThread::NDIOutputThread(const QString& Name, int w, int h) : NDISenderName(Name), width(w), height(h), isSending(false), Instance(nullptr)
{
}
......@@ -93,7 +95,8 @@ void NDIOutputThread::run()
if(frame->IsValid())
{
frame->Fill(Frame.p_data, Frame.xres * Frame.yres * 4);
//qDebug() << "wgt-----------------------------"<<"ndi(" << NDISenderName << ")timestamp : " << frame->getInputFrameCurTimeStamp() - StartTimeStamp << "\n";
Frame.timestamp = frame->getInputFrameCurTimeStamp();
NDIlib_send_send_video_v2(Instance, &Frame);
}
taskQueue.Pop(frame);
......
......@@ -32,7 +32,7 @@ CaptureThread::~CaptureThread()
}
void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame)
void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame, const qint64& timestamp)
{
int64_t st = QDateTime::currentMSecsSinceEpoch();
int64_t st2 = GetCurrTimeMS();
......@@ -77,7 +77,9 @@ void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame)
tags = tag.c_str();
//PRINT_CURR_TIME(tags);
taskQueue.push(videoFrame);
//taskQueue.push(videoFrame);
videoFrameData video_data = { videoFrame,timestamp };
taskVideoQueue.push(video_data);
tag = "CaptureThread::AddFrame::S1::" + std::to_string(idx);
tags = tag.c_str();
......@@ -106,6 +108,7 @@ void CaptureThread::run()
START_WAIT_TIME_COUNTER
ComPtr<IDeckLinkVideoInputFrame> videoFrame;
qint64 cur_time = 0;
/*if (taskQueue.WaitFor(videoFrame))
{
END_WAIT_TIME_COUNTER
......@@ -120,17 +123,19 @@ void CaptureThread::run()
{
std::unique_lock<std::mutex> ulock(mutex);
while (taskQueue.empty()) {
while (taskVideoQueue.empty()) {
cv.wait(ulock);
}
videoFrame = taskQueue.front();
taskQueue.pop();
auto& video_data = taskVideoQueue.front();
videoFrame = video_data.video_frame;
cur_time = video_data.timestamp;
taskVideoQueue.pop();
}
if (videoFrame.Get() != nullptr)
{
auto cur_time = QDateTime::currentMSecsSinceEpoch();
qDebug() << "input frame cur time:" << cur_time << "\n";
//auto cur_time = QDateTime::currentMSecsSinceEpoch();
//qDebug() << "input frame cur time:" << cur_time << "\n";
std::shared_ptr<Image> image = std::make_shared<Image>(videoFrame,cur_time);
emit PushFrame(image);
}
......
......@@ -4,11 +4,13 @@
int ProcessThread::s_count = 0;
extern int OutputDeleyTime;
extern int FrameRate;
ProcessThread::ProcessThread()
: sendFrames(0),
sendStartTime(QDateTime::currentMSecsSinceEpoch()),
recvROIs(0),
continuousLostNums(0),
recvROIStartTime(QDateTime::currentMSecsSinceEpoch())
{
idx = s_count++;
......@@ -39,21 +41,58 @@ void ProcessThread::AddFrame(std::shared_ptr<Image> image)
END_SLOT_TIME_COUNTER
}
void ProcessThread::ReadDatagrams()
void ProcessThread::ReadDatagramsNew()
{
START_SLOT_TIME_COUNTER
QHostAddress inClientAddr;
quint16 inClientPort;
QByteArray data;
data.clear();
while(udpSocket->hasPendingDatagrams())
while (udpSocket->hasPendingDatagrams())
{
data.resize(udpSocket->pendingDatagramSize());
udpSocket->readDatagram(data.data(), data.size(), &inClientAddr, &inClientPort);
//qDebug()<< "recv time:" << QDateTime::currentMSecsSinceEpoch() << "\t" << "recv udp data:" << idx << "\t" << data.data() << "\n";
}
//qDebug() << "read udp data thread id:" << QThread::currentThreadId() << "\n";
if (!data.isEmpty())
{
taskROIQueue.Push(RoiMessage(data));
/*recvROIs++;
int64_t elapse = QDateTime::currentMSecsSinceEpoch() - recvROIStartTime;
if (elapse >= 5 * 1000) {
float val = recvROIs * 1000.0 / elapse;
qDebug() << fixed << qSetRealNumberPrecision(2) << "PPProcessThread::run::recvROIs::" << idx << "\t" << val << "\n";
recvROIs = 0;
recvROIStartTime = QDateTime::currentMSecsSinceEpoch();
}*/
}
END_SLOT_TIME_COUNTER
}
void ProcessThread::ReadDatagrams()
{
START_SLOT_TIME_COUNTER
QHostAddress inClientAddr;
quint16 inClientPort;
QByteArray data;
data.clear();
while(udpSocket->hasPendingDatagrams())
{
data.resize(udpSocket->pendingDatagramSize());
udpSocket->readDatagram(data.data(), data.size(), &inClientAddr, &inClientPort);
/*if (!data.isEmpty())
{
RoiMessage msg = RoiMessage(data);
qDebug() << "recv time:" << QDateTime::currentMSecsSinceEpoch() << "\t" << "recv udp data:" << idx << "\t" << data.data() << "chazhi:" << QDateTime::currentMSecsSinceEpoch() - msg.Timecode()<< "\n";
}*/
}
//qDebug() << "read udp data thread id:" << QThread::currentThreadId() << "\n";
if(!data.isEmpty())
{
taskROIQueue.Push(RoiMessage(data));
......@@ -71,123 +110,371 @@ void ProcessThread::ReadDatagrams()
END_SLOT_TIME_COUNTER
}
void ProcessThread::run()
void ProcessThread::cutRun()
{
bool continue_flag = false;
while (true)
{
START_WAIT_TIME_COUNTER
std::shared_ptr<Image> image;
RoiMessage roi;
if(taskImageQueue.WaitFor(image))
std::shared_ptr<Image> image = nullptr;
if (taskImageQueue.WaitFor())//к
{
END_WAIT_TIME_COUNTER
//qDebug() << "ProcessThread image current time111:" << image->getInputFrameCurTimeStamp() << "\n";
if (taskROIQueue.WaitUntil(roi, 1))
while (taskROIQueue.Size()) //roimsg
{
RoiMessage roi;
taskROIQueue.Pop(roi);
cutRuleMap[roi.Timecode()] = roi;
}
if (taskImageQueue.Front(image))
{
lastReceiveMessage = roi;
qDebug() << "idx:" <<idx << " ,current raw data:"<<image->getInputFrameCurTimeStamp() << "\n";
if (!image)
{
taskImageQueue.Pop();
continue;
}
if (cutRuleMap.size())
{
auto itor = cutRuleMap.find(image->getInputFrameCurTimeStamp());
if (itor != cutRuleMap.end())
{
qDebug() << "idx:" << idx << "find rule,timecode: "<<image->getInputFrameCurTimeStamp() << "\n";
lastReceiveMessage = itor->second;
WorkCutImage(image, lastReceiveMessage);
taskImageQueue.Pop();
}
else
{
if (image->getInputFrameCurTimeStamp() < cutRuleMap.begin()->first)
{
qDebug() << "idx:" << idx << " timecode: " << image->getInputFrameCurTimeStamp()<<" too small " << "\n";
lastReceiveMessage = cutRuleMap.begin()->second;
WorkCutImage(image, lastReceiveMessage);
taskImageQueue.Pop();
}
else
{
auto end = cutRuleMap.end(); end--;
if (end->second.Timecode() > image->getInputFrameCurTimeStamp())
{
qDebug() << "idx:" << idx << " timecode: " << image->getInputFrameCurTimeStamp() << " do lost frame ..... " << "\n";
for (auto itor = cutRuleMap.begin(); itor != cutRuleMap.end(); itor++)
{
if (itor->first > image->getInputFrameCurTimeStamp())
{
auto roimsg = itor->second;
if (itor != cutRuleMap.begin())
{
auto tmp_itor = itor;
--tmp_itor;
auto roi_front = tmp_itor->second;
int add = roimsg.X() + roi_front.X();
lastReceiveMessage.SetX(add / 2);
}
else lastReceiveMessage = roimsg;
WorkCutImage(image, lastReceiveMessage);
taskImageQueue.Pop();
break;
}
}
continue;
}
else
{
qDebug() << "idx:" << idx << " timecode: " << image->getInputFrameCurTimeStamp() << " wait cut rule..... " << "\n";
if (!taskROIQueue.WaitUntil(200))
{
qDebug() << "idx:" << idx << " wait 200ms ,and not recv data!!!!!!!!!!!!" << endl;
WorkCutImage(image, lastReceiveMessage);
taskImageQueue.Pop();
cutRuleMap.clear();
}
continue;
}
}
//if (continue_flag)
//{
// continue_flag = false;
// continue;
//}
//if (image->getInputFrameCurTimeStamp() < cutRuleMap.begin()->first)
//{
// WorkCutImage(image, lastReceiveMessage);
// taskImageQueue.Pop();
//}
//else
//{
// //taskROIQueue.WaitFor();
// taskROIQueue.WaitUntil(50);
// continue;
//}
}
}
else
{
WorkCutImage(image, lastReceiveMessage);
taskImageQueue.Pop();
}
}
}
while (taskImageQueue.Size() > (FrameRate * OutputDeleyTime))
{
qDebug() << "idx:" << idx << ",----------------lost Image size:" << taskImageQueue.Size() - (FrameRate * OutputDeleyTime) << endl;
taskImageQueue.Pop(image);
//qDebug() << "idx:" << idx << ",lost timestamp:" << image->getInputFrameCurTimeStamp() << "\n";
/*auto find_itor = cutRuleMap.find(image->getInputFrameCurTimeStamp());
if (find_itor == cutRuleMap.end())
{
auto end = cutRuleMap.end(); end--;
if (end->second.Timecode() > image->getInputFrameCurTimeStamp())
{
for (auto itor = cutRuleMap.begin(); itor != cutRuleMap.end(); itor++)
{
if (itor->first > image->getInputFrameCurTimeStamp())
{
auto roimsg = itor->second;
if (itor != cutRuleMap.begin())
{
auto tmp_itor = itor;
--tmp_itor;
auto roi_front = tmp_itor->second;
int add = roimsg.X() + roi_front.X();
lastReceiveMessage.SetX(add / 2);
}
else lastReceiveMessage = roimsg;
WorkCutImage(image, lastReceiveMessage);
taskImageQueue.Pop();
break;
}
}
}
else
{
lastReceiveMessage = find_itor->second;
WorkCutImage(image, lastReceiveMessage);
}
}
else
{
roi = lastReceiveMessage;
WorkCutImage(image, lastReceiveMessage);
}*/
}
if(cutRuleMap.size() > 125)
{
//qDebug() << " idx:" << idx << "rule map start clear data,more size:"<< cutRuleMap.size() - 125 << "\n";
for (auto it = cutRuleMap.begin(); it != cutRuleMap.end();)
{
if (it->first < image->getInputFrameCurTimeStamp() && cutRuleMap.size() > 5)
{
cutRuleMap.erase(it++);
}
else
{
++it;
}
}
//roi = lastReceiveMessage;
}
//START_TIME_COUNTER_BASE(OpenCV)
}
}
/*cv::Rect cvroi(roi.X(), roi.Y(), roi.Width(), roi.Height());
cv::Mat mat = image->GetMat().clone();
void ProcessThread::run()
{
cutRun();
//while (true)
//{
// START_WAIT_TIME_COUNTER
// std::shared_ptr<Image> image;
// RoiMessage roi;
// if (taskImageQueue.WaitFor(image))
// {
// END_WAIT_TIME_COUNTER
// if (taskROIQueue.WaitUntil(roi, 1))
// {
// //qDebug() << "wgt------------idx:"<< idx << ", get rio msg data timestamp : " << roi.Timecode() << ", get rio msg data x : " <<roi.X() << "\n";
// if (image->getInputFrameCurTimeStamp() < roi.Timecode())
// {
// cutRuleMap[roi.Timecode()] = roi;
// }
//
// std::vector<qint64> delVec;
// for (auto key : cutRuleMap)
// {
// if (key.first <= image->getInputFrameCurTimeStamp())
// {
// lastReceiveMessage = key.second;
// delVec.push_back(key.first);
// //cutRuleMap.erase(key.first);
// }
// }
// for (auto key : delVec)
// {
// cutRuleMap.erase(key);
// }
// //lastReceiveMessage = roi;
// }
// /*else
// {
// roi = lastReceiveMessage;
// }*/
// //qint64 cut1tm = QDateTime::currentMSecsSinceEpoch();
// /*if (lastReceiveMessage.Timecode() > 0 && image->getInputFrameCurTimeStamp() > lastReceiveMessage.Timecode())
// {
// if(cutRuleMap.find(image->getInputFrameCurTimeStamp()) == cutRuleMap.end())
// qDebug() <<"idx:"<<idx << ",use last roi msg timestamp:--------->" << image->getInputFrameCurTimeStamp() << "\n";
// else
// qDebug() << "idx:" << idx << ",use roi msg is error!!!!!!" << image->getInputFrameCurTimeStamp() << ",right roi msg x:"<< cutRuleMap.find(image->getInputFrameCurTimeStamp())->second.X() << "\n";
// }*/
// WorkCutImage(image, lastReceiveMessage);
// //qDebug() << "idx:" << idx << " ,work cut image duration:" << QDateTime::currentMSecsSinceEpoch() - cut1tm << "\n";
// }
// //qDebug() << "cut image thread id:" << QThread::currentThreadId() << "\n";
// while (taskImageQueue.Size() > (FrameRate * OutputDeleyTime))
// {
// qDebug() <<"idx:"<<idx << ",----------------lost Image size:" << taskImageQueue.Size() - (FrameRate * OutputDeleyTime) << endl;
// taskImageQueue.Pop(image);
// qDebug() << "idx:" << idx << ",lost timestamp:" << image->getInputFrameCurTimeStamp() << "\n";
// }
// while (taskROIQueue.Size() > (FrameRate * OutputDeleyTime))
// {
// taskROIQueue.Pop(roi);
// }
//}
}
cv::UMat umat4Image(mat.rows, mat.cols, CV_8UC3);
cv::UMat umat4RotatedImage(810, 1080, CV_8UC3);
void ProcessThread::WorkCutImage(std::shared_ptr<Image>& pImage, RoiMessage& roi)
{
	// Crops the tracked ROI out of a 1920x1080 BGRA frame, rotates it 90
	// degrees clockwise, scales it and letterboxes it into the output frame,
	// then pushes the finished frame downstream via PushFrame.
	//
	// NOTE(review): the previous text of this function was corrupted
	// merge/diff residue (an unbalanced /* ... */ pair, references to
	// undefined identifiers `image`, `mat` and `cvroi`, and two interleaved
	// copies of the same pipeline). Reconstructed here from the coherent
	// libyuv-based code path — verify against the repository version.
#if USE_1080P
	// After kRotate90 the rotated crop is roi.Height() pixels wide and
	// roi.Width() pixels high; 4 bytes per ARGB pixel throughout.
	size_t size = roi.Width() * roi.Height() << 2;
	uint8_t* buff1 = new uint8_t[size];
	libyuv::ConvertToARGB(pImage->GetBytes(), (1920 * 1080 << 2), buff1, (roi.Height() << 2),
		roi.X(), roi.Y(), 1920, 1080, roi.Width(), roi.Height(),
		libyuv::kRotate90, libyuv::FOURCC_ARGB);
	// Scale the rotated crop to 1440x1080 (nearest-neighbour for speed).
	uint8_t* buff2 = new uint8_t[1440 * 1080 << 2];
	libyuv::ARGBScale(buff1, (roi.Height() << 2), roi.Height(), roi.Width(),
		buff2, 1440 << 2, 1440, 1080, libyuv::FilterMode::kFilterNone);
	// Paste into a black 1920x1080 canvas, centred horizontally (x offset 240).
	uint8_t* buff3 = new uint8_t[1920 * 1080 << 2];
	memset(buff3, 0, (1920 * 1080 << 2));
	libyuv::ARGBCopy(buff2, 1440 << 2, (buff3 + 240 * 4), 1920 << 2, 1440, 1080);
	// The Mat wraps buff3 without copying; SetMat() presumably deep-copies,
	// since buff3 is released before the frame is used downstream --
	// TODO(review): confirm Image::SetMat clones the pixel data.
	cv::Mat bgra = cv::Mat(1080, 1920, CV_8UC4, buff3);
	pImage->SetMat(bgra);
	delete[] buff1;
	delete[] buff2;
	delete[] buff3;
	emit PushFrame(pImage);
	DEBUG_FUNCTION("taskImageQueue Size: ", taskImageQueue.Size())
	// Rolling FPS log, emitted roughly every two seconds.
	sendFrames++;
	int elapse = (QDateTime::currentMSecsSinceEpoch() - sendStartTime) / 1000;
	if (elapse >= 2) {
		int fps = sendFrames / elapse;
		qDebug() << "ProcessThread::run::fps::" << idx << "\t" << fps << "\n";
		sendFrames = 0;
		sendStartTime = QDateTime::currentMSecsSinceEpoch();
	}
#else
	WorkCutImage720p(pImage, roi);
#endif
}
void ProcessThread::WorkCutImage720p(std::shared_ptr<Image>& pImage, RoiMessage& roi)
{
	// 720p output path: crop the ROI from the 1920x1080 BGRA frame, rotate it
	// 90 degrees clockwise, paste it into a black 1280x720 canvas at x offset
	// 100, and hand the finished frame to the output stage via PushFrame.
	//
	// After kRotate90 the rotated crop is roi.Height() pixels wide and
	// roi.Width() pixels high; 4 bytes per ARGB pixel throughout.
	const int rotatedWidth = roi.Height();
	const int rotatedHeight = roi.Width();
	size_t size = roi.Width() * roi.Height() << 2;
	uint8_t* buff1 = new uint8_t[size];
	libyuv::ConvertToARGB(pImage->GetBytes(), (1920 * 1080 << 2), buff1, (rotatedWidth << 2),
		roi.X(), roi.Y(), 1920, 1080, roi.Width(), roi.Height(),
		libyuv::kRotate90, libyuv::FOURCC_ARGB);
	uint8_t* buff3 = new uint8_t[1280 * 720 << 2];
	memset(buff3, 0, (1280 * 720 << 2));
	// Clamp the pasted region so it cannot run past the 1280x720 canvas: the
	// original copied rotatedWidth x rotatedHeight unconditionally, which
	// overflows buff3 whenever the rotated crop is larger than the canvas
	// (rotatedWidth > 1180 or rotatedHeight > 720).
	const int copyWidth = rotatedWidth < (1280 - 100) ? rotatedWidth : (1280 - 100);
	const int copyHeight = rotatedHeight < 720 ? rotatedHeight : 720;
	if (copyWidth > 0 && copyHeight > 0)
		libyuv::ARGBCopy(buff1, (rotatedWidth << 2), (buff3 + 100 * 4), 1280 << 2,
			copyWidth, copyHeight);
	// The Mat wraps buff3 without copying; SetMat() presumably deep-copies,
	// since buff3 is freed before the frame is emitted -- TODO(review):
	// confirm Image::SetMat clones the pixel data.
	cv::Mat bgra = cv::Mat(720, 1280, CV_8UC4, buff3);
	pImage->SetMat(bgra);
	delete[] buff1;
	delete[] buff3;
	emit PushFrame(pImage);
}
void ProcessThread::Clear()
......
#include "Utils/AudioPacket.h"
// Snapshot a DeckLink audio packet into an owned QByteArray.
//
// @param audioPacket  source packet; may be null, in which case the object is
//                     left empty (zero samples, zero size, empty audio_data).
// @param timestamp    capture timestamp copied into frame_time_stamp.
//
// Fix: previously `sample` and `size` were only assigned inside the
// non-null branch, leaving them uninitialized for a null packet (no in-class
// initializers are visible for them -- TODO confirm against the header).
AudioPacket::AudioPacket(IDeckLinkAudioInputPacket* audioPacket, qint64& timestamp):frame_time_stamp(timestamp),channel(2)
{
	sample = 0;
	size = 0;
	if (audioPacket)
	{
		sample = audioPacket->GetSampleFrameCount();
		// 16-bit PCM, `channel` interleaved channels per sample frame.
		size = sample * sizeof(int16_t) * channel;
		void* data = nullptr;
		audioPacket->GetBytes(&data);
		if (data && size > 0) {
			// Deep copy: the SDK buffer is only valid during the callback.
			audio_data = QByteArray((char const*)data, size);
		}
	}
}
// Trivial destructor: audio_data (QByteArray) releases its buffer itself.
AudioPacket::~AudioPacket() = default;
\ No newline at end of file
......@@ -54,6 +54,7 @@ int main(int argc, char *argv[])
qRegisterMetaType<Image>("Image");
qRegisterMetaType<Image>("Image&");
qRegisterMetaType<std::shared_ptr<Image>>("std::shared_ptr<Image>");
qRegisterMetaType<std::shared_ptr<AudioPacket>>("std::shared_ptr<AudioPacket>");
MomentaMedia w;
w.show();
......
This source diff could not be displayed because it is too large. You can view the blob instead.
[DELEYTIME]
DELEY_TIME=10
\ No newline at end of file
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
[DELEYTIME]
DELEY_TIME=5
FRAME_RATE=25
PRVW_FLAG=0
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment