Commit 783454f0 by 王国涛-悦动

pull

parent eccdcbf3
#include "Threads/CaptureThread.h"
#include "Threads/CaptureThread.h"
#include "Utils/Common.h"
// Returns a monotonic timestamp in milliseconds derived from the Windows
// high-resolution performance counter (QueryPerformanceCounter).
// NOTE: unlike QDateTime::currentMSecsSinceEpoch() this is NOT wall-clock
// time; it is only meaningful for measuring elapsed intervals.
// (Dead commented-out elapsed-time code removed.)
static int64_t GetCurrTimeMS()
{
    LARGE_INTEGER counter;
    LARGE_INTEGER frequency;
    ::QueryPerformanceFrequency(&frequency);
    ::QueryPerformanceCounter(&counter);
    // Multiply before dividing to keep millisecond precision.
    return counter.QuadPart * 1000 / frequency.QuadPart;
}
// Monotonically increasing instance counter; each CaptureThread takes its
// unique idx from this in the constructor (used only for log messages).
int CaptureThread::s_count = 0;
// Constructs the capture worker and its pixel-format converter.
// BUGFIX: the VideoScale dimensions were hard-coded to 3840x2160 even when
// USE_4K is 0, while AddFrame() then only accepts 1920x1080 frames; the
// scaler now follows the same USE_4K switch (no change while USE_4K == 1).
CaptureThread::CaptureThread()
    : recvFrames(0),
      idx(s_count++),                                    // unique instance id
      recvStartTime(QDateTime::currentMSecsSinceEpoch()),
      m_fps(0),
      m_lastRecvTS(TimeMilliSecond())
{
#if USE_4K
    m_scale = new VideoScale(3840, 2160, AV_PIX_FMT_UYVY422, 3840, 2160, AV_PIX_FMT_BGRA);
#else
    m_scale = new VideoScale(1920, 1080, AV_PIX_FMT_UYVY422, 1920, 1080, AV_PIX_FMT_BGRA);
#endif
}
// Releases the scaler allocated in the constructor.
// NOTE(review): the run() loop is not stopped/joined here; callers are
// expected to have exited the thread before destruction -- TODO confirm.
CaptureThread::~CaptureThread()
{
    delete m_scale;
}
// Slot: receives a captured frame from the DeckLink input callback and
// queues it (with its arrival timestamp and a fresh sequence number) for
// run(). Frames that are null, empty, or not exactly the expected
// resolution (3840x2160 when USE_4K, else 1920x1080) are dropped.
//
// Cleanup: removed the unused st/st2 timing locals (st2 invoked
// QueryPerformanceCounter on every frame for nothing) and the large blocks
// of commented-out instrumentation.
// BUGFIX: the backlog warning inspected the legacy, always-empty taskQueue;
// it now inspects taskVideoQueue, the queue frames are actually pushed to.
void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame, const qint64& timestamp)
{
    START_SLOT_TIME_COUNTER
    if (videoFrame == nullptr)
        return;
    if (videoFrame->GetWidth() <= 0 || videoFrame->GetHeight() <= 0)
        return;
#if USE_4K
    if (videoFrame->GetWidth() != 3840 || videoFrame->GetHeight() != 2160)
        return;
#else
    if (videoFrame->GetWidth() != 1920 || videoFrame->GetHeight() != 1080)
        return;
#endif
    // Warn when the consumer (run()) is falling behind.
    if (taskVideoQueue.size() >= 4)
        qDebug() << "CaptureThread::AddFrame::qsize::" << idx << "\t" << taskVideoQueue.size() << "\n";
    {
        std::unique_lock<std::mutex> ulock(mutex);
        sequenceNum++;
        videoFrameData video_data = { videoFrame, timestamp, sequenceNum };
        taskVideoQueue.push(video_data);
        cv.notify_all();
    }
    END_SLOT_TIME_COUNTER
}
// Worker loop: blocks on the queue, converts each UYVY frame to BGRA via
// m_scale, wraps the result in an Image and emits PushFrame(). Logs the
// conversion fps and queue depth once per second.
// BUGFIX: the Image height was 2610 -- a typo; the scaler output is
// 3840x2160. Dead commented-out drain/debug code removed.
void CaptureThread::run()
{
    void* srcBuff;
    uint8_t* dstBuff;
    int dstBuffSize;
    uint64_t currTime, deltaTime;
    int qsize;
    while (true)
    {
        START_WAIT_TIME_COUNTER
        ComPtr<IDeckLinkVideoInputFrame> videoFrame;
        qint64 cur_time = 0;
        qint64 sequence = 0;
        {
            // Wait for a frame; the lock guards taskVideoQueue.
            std::unique_lock<std::mutex> ulock(mutex);
            while (taskVideoQueue.empty()) {
                cv.wait(ulock);
            }
            auto& video_data = taskVideoQueue.front();
            videoFrame = video_data.video_frame;
            cur_time = video_data.timestamp;
            sequence = video_data.sequenceNum;
            taskVideoQueue.pop();
            qsize = taskVideoQueue.size();   // backlog after this pop, for the log
        }
        if (videoFrame.Get() != nullptr)
        {
            videoFrame->GetBytes(&srcBuff);
            // UYVY422 -> BGRA; dstBuff is owned by the AVBuff below.
            m_scale->scale((uint8_t*)srcBuff, 0, &dstBuff, &dstBuffSize);
            std::shared_ptr<Image> image = std::make_shared<Image>(
                std::make_shared<AVBuff>(dstBuff), 3840, 2160, bmdFormat8BitBGRA, cur_time, sequence);
            emit PushFrame(image);
            m_fps++;
        }
        // Once-per-second fps / backlog report.
        currTime = TimeMilliSecond();
        deltaTime = currTime - m_lastRecvTS;
        if (deltaTime >= 1000)
        {
            qDebug() << GetCurrDateTimeStr() << " capture scale fps " << m_fps << ", qsize " << qsize << "\n";
            m_fps = 0;
            m_lastRecvTS = currTime;
        }
    }
}
\ No newline at end of file
#pragma once
#pragma once
#include <QThread>
#include <QMutex>
#include "Utils/Image.h"
#include "Utils/SampleQueue.h"
#include "Utils/VideoScale.h"
#include <condition_variable>
#include <memory>
#include <mutex>
#include <queue>
// Worker thread that receives DeckLink video frames (AddFrame slot),
// converts them on its own thread (run) and re-emits them as Image
// objects via PushFrame.
class CaptureThread : public QThread
{
    Q_OBJECT
public:
    CaptureThread();
    ~CaptureThread();
public slots:
    // Queues a frame together with its arrival timestamp for run().
    void AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame,const qint64& timestamp);
signals:
    // Emitted for every converted frame.
    void PushFrame(std::shared_ptr<Image> image);
private:
    // Queue element: a frame plus its capture timestamp and sequence number.
    typedef struct videoFrameData
    {
        ComPtr<IDeckLinkVideoInputFrame> video_frame;
        qint64 timestamp;
        qint64 sequenceNum = 0;
    }videoFrameData;
    static int s_count;          // instance counter used to derive idx
    int idx;                     // per-instance id (log messages only)
    //SampleQueue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue;
    void run() override;         // QThread entry point (consumer loop)
    int recvFrames;              // legacy fps accounting (not updated by live code)
    int64_t recvStartTime;       // legacy fps accounting (not updated by live code)
    std::queue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue;   // legacy queue; nothing pushes to it
    std::queue<videoFrameData> taskVideoQueue;                // actual producer/consumer queue
    std::condition_variable cv;  // wakes run() when a frame is queued
    std::mutex mutex;            // guards the queues and sequenceNum
    qint64 sequenceNum = 0;      // monotonically increasing frame sequence
    VideoScale* m_scale;         // UYVY422 -> BGRA converter (owned; deleted in dtor)
    int m_fps;                   // frames converted in the current 1-second window
    uint64_t m_lastRecvTS;       // start of the current fps window (ms)
};
\ No newline at end of file
#pragma once
#pragma once
#include "DeckLinkAPI.h"
#include <QDateTime.h>
#include <sys/timeb.h>
#include <chrono>
#include <cstdint>
#define USE_4K 1
#define USE_1080P 1
long GetRowBytesFromPixelFormat(long width, BMDPixelFormat pixelFormat);
// Formats the current local date-time as "yyyy-MM-dd HH:mm:ss"
// (used as a prefix for log lines).
static QString GetCurrDateTimeStr()
{
    return QDateTime::currentDateTime().toString("yyyy-MM-dd HH:mm:ss");
}
// Returns the current wall-clock time in milliseconds since the Unix epoch.
// Replaces the obsolete ftime()/<sys/timeb.h> implementation with
// std::chrono, which is portable, not deprecated, and immune to the
// 32-bit time_t overflow in `now.time * 1000`. The returned value is the
// same: milliseconds since the epoch.
static uint64_t TimeMilliSecond()
{
    using namespace std::chrono;
    return static_cast<uint64_t>(
        duration_cast<milliseconds>(system_clock::now().time_since_epoch()).count());
}
\ No newline at end of file
#include <QCoreApplication>
#include <QCoreApplication>
#include <QMessageBox>
#include <QTextStream>
#include "Utils/Platform.h"
#include "BlackMagicDesign/DeckLinkInputDevice.h"
#include "Utils/Common.h"
extern int AudioChannel;
// Wraps one physical DeckLink device for capture.
// @param parent owner object; receives custom QEvents on format changes
// @param device the IDeckLink instance to capture from
// @param index  page/device index (used for logging)
DeckLinkInputDevice::DeckLinkInputDevice(QObject* parent, ComPtr<IDeckLink>& device, int index)
    : RefCount(1),                                         // COM-style refcount; starts owned by creator
      Owner(parent),
      DeckLink(device),
      DeckLinkInput(IID_IDeckLinkInput, device),           // query input interface
      DeckLinkConfig(IID_IDeckLinkConfiguration, device),  // query configuration interface
      bSupportsFormatDetection(false),
      bCurrentlyCapturing(false),
      bApplyDetectedInputMode(false),
      bLastValidFrameStatus(false),
      SupportedInputConnections(bmdVideoConnectionUnspecified),
      SelectedInputConnection(bmdVideoConnectionUnspecified),
      Index(index),
      m_pushed(true),                                      // frame-decimation toggle (see VideoInputFrameArrived)
      PrevInputSignalAbsent(false),
      m_fps(0),
      m_lastRecvTS(TimeMilliSecond())
{
    //thd = std::thread(&DeckLinkInputDevice::ForwardThread, this);
}
// Ensures the input streams are stopped before the wrapper is destroyed.
DeckLinkInputDevice::~DeckLinkInputDevice()
{
    if (bCurrentlyCapturing)
        StopCapture();
}
// IUnknown methods
// IUnknown::QueryInterface -- this object exposes IUnknown and
// IDeckLinkInputCallback; any other IID yields E_NOINTERFACE.
HRESULT DeckLinkInputDevice::QueryInterface(REFIID riid, LPVOID* ppv)
{
    if (ppv == nullptr)
        return E_INVALIDARG;

    if (riid == IID_IUnknown)
    {
        *ppv = this;
        AddRef();
        return S_OK;
    }

    if (riid == IID_IDeckLinkInputCallback)
    {
        *ppv = (IDeckLinkInputCallback*)this;
        AddRef();
        return S_OK;
    }

    // Unsupported interface.
    *ppv = nullptr;
    return E_NOINTERFACE;
}
// IUnknown::AddRef -- atomically increments and returns the new count.
ULONG DeckLinkInputDevice::AddRef()
{
    return RefCount.fetch_add(1) + 1;
}
// IUnknown::Release -- atomically decrements the count and self-deletes
// when it reaches zero (COM convention).
ULONG DeckLinkInputDevice::Release()
{
    const ULONG remaining = RefCount.fetch_sub(1) - 1;
    if (remaining == 0)
        delete this;
    return remaining;
}
// IDeckLinkInputCallback methods
// IDeckLinkInputCallback: invoked by the driver for every captured video
// frame and/or audio packet.
// - Restarts the input streams when a previously absent signal returns.
// - Forwards every OTHER valid frame to CaptureThread / NDI output
//   (m_pushed toggles on each callback, halving the forwarded frame rate).
// - Re-emits audio packets as AudioPacket objects.
// - Logs the incoming frame rate once per second.
HRESULT DeckLinkInputDevice::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioPacket)
{
    // Wrap the raw frame so downstream consumers share ownership.
    ComPtr<IDeckLinkVideoInputFrame> frame = ComPtr<IDeckLinkVideoInputFrame>(videoFrame);
    //emit ArrivedFrame(frame);
    if (videoFrame && Capture) {
        unsigned flags = videoFrame->GetFlags();
        bool noInputSourceFlag = false;
        if (flags & bmdFrameHasNoInputSource) {
            //qDebug() << GetCurrDateTimeStr() << "index: " << Index << " DeckLinkInputDevice get video frame No input source " << hex << flags << " ------------ \n";
            //return S_OK;
            noInputSourceFlag = true;
        }
        // Signal just came back after an outage: restart the streams so the
        // driver resynchronizes.
        bool restartStream = !noInputSourceFlag && PrevInputSignalAbsent;
        if (restartStream)
        {
            DeckLinkInput->StopStreams();
            DeckLinkInput->FlushStreams();
            DeckLinkInput->StartStreams();
        }
        PrevInputSignalAbsent = noInputSourceFlag;
        if (noInputSourceFlag)
            return S_OK;
        if (m_pushed)
        {
            auto cur_time = QDateTime::currentMSecsSinceEpoch();
            Capture->AddFrame(frame, cur_time);
            if (NDIOutput)
                NDIOutput->AddFrame(frame);
        }
        m_pushed = !m_pushed;   // drop every second frame
        uint64_t currTime, deltaTime;
        m_fps++;
        currTime = TimeMilliSecond();
        deltaTime = currTime - m_lastRecvTS;
        if (deltaTime >= 1000)
        {
            qDebug() << GetCurrDateTimeStr() << " decklink input fps " << m_fps << "\n";
            m_fps = 0;
            m_lastRecvTS = currTime;
        }
    }
    if(audioPacket)
    {
        //qDebug() << "DeckLinkInputDevice get audio packet--------------" << "\n";
        auto cur_time = QDateTime::currentMSecsSinceEpoch();
        std::shared_ptr<AudioPacket> audio_ptr = std::make_shared<AudioPacket>(audioPacket, cur_time, AudioChannel);
        emit PushAudioFrame(audio_ptr);
    }
    /*std::unique_lock<std::mutex> ulock(mutex);
    taskQueue.push(frame);
    cv.notify_all();*/
    return S_OK;
}
/*void DeckLinkInputDevice::ForwardThread()
{
ComPtr<IDeckLinkVideoInputFrame> frame;
while (true) {
{
std::unique_lock<std::mutex> ulock(mutex);
while (taskQueue.empty()) {
cv.wait(ulock);
}
frame = taskQueue.front();
taskQueue.pop();
}
if (Capture) {
Capture->AddFrame(frame);
}
}
}*/
// IDeckLinkInputCallback: invoked when automatic input format detection
// reports a change in display mode or colorspace.
// NOTE(review): the unconditional `return S_OK;` below makes everything
// after it unreachable -- format changes are effectively ignored. This
// looks like a deliberate hack to pin the hard-coded 4K input mode used in
// StartCapture(); confirm before removing it.
HRESULT DeckLinkInputDevice::VideoInputFormatChanged(BMDVideoInputFormatChangedEvents notificationEvents, IDeckLinkDisplayMode* newDisplayMode, BMDDetectedVideoInputFormatFlags detectedSignalFlags)
{
    HRESULT result;
    BMDPixelFormat pixelFormat;
    BMDDisplayMode displayMode = newDisplayMode->GetDisplayMode();
    return S_OK;   // <-- disables all of the handling below (see note above)
    // Unexpected callback when auto-detect mode not enabled
    if (!bApplyDetectedInputMode)
        return E_FAIL;
    // Map the detected colorspace + bit depth to a pixel format.
    if (detectedSignalFlags & bmdDetectedVideoInputRGB444)
    {
        if (detectedSignalFlags & bmdDetectedVideoInput8BitDepth)
            pixelFormat = bmdFormat8BitARGB;
        else if (detectedSignalFlags & bmdDetectedVideoInput10BitDepth)
            pixelFormat = bmdFormat10BitRGB;
        else if (detectedSignalFlags & bmdDetectedVideoInput12BitDepth)
            pixelFormat = bmdFormat12BitRGB;
        else
            // Invalid color depth for RGB
            return E_INVALIDARG;
    }
    else if (detectedSignalFlags & bmdDetectedVideoInputYCbCr422)
    {
        if (detectedSignalFlags & bmdDetectedVideoInput8BitDepth)
            pixelFormat = bmdFormat8BitYUV;
        else if (detectedSignalFlags & bmdDetectedVideoInput10BitDepth)
            pixelFormat = bmdFormat10BitYUV;
        else
            // Invalid color depth for YUV
            return E_INVALIDARG;
    }
    else
        // Unexpected detected video input format flags
        return E_INVALIDARG;
    // Restart streams if either display mode or colorspace has changed
    if (notificationEvents & (bmdVideoInputDisplayModeChanged | bmdVideoInputColorspaceChanged))
    {
        // Stop the capture
        DeckLinkInput->StopStreams();
        // Set the video input mode
        //pixelFormat = bmdFormat8BitYUV;
        result = DeckLinkInput->EnableVideoInput(displayMode, pixelFormat, bmdVideoInputEnableFormatDetection);
        if (result == S_OK)
            // Start the capture
            result = DeckLinkInput->StartStreams();
        if (result != S_OK)
            // Let owner know we couldn`t restart capture with detected input video mode
            QCoreApplication::postEvent(Owner, new QEvent(kErrorRestartingCaptureEvent));
        else
            QCoreApplication::postEvent(Owner, new DeckLinkInputFormatChangedEvent(displayMode));
    }
    return S_OK;
}
// Other methods
// Other methods
// Queries the device's capabilities (input interface, configuration
// interface, format detection support, available input connections).
// @return false if the device lacks an input or configuration interface.
bool DeckLinkInputDevice::Initialize()
{
    ComPtr<IDeckLinkProfileAttributes> deckLinkAttributes(IID_IDeckLinkProfileAttributes, DeckLink);
    dlbool_t attributeFlag;
    // Get input interface
    if (!DeckLinkInput)
        // This may occur if device does not have input interface, for instance DeckLink Mini Monitor.
        return false;
    // Get Configuration interface so we can change input connector
    // We hold onto IDeckLinkConfiguration until destructor to retain input connector setting
    if (!DeckLinkConfig)
        return false;
    // Get attributes interface
    if (!deckLinkAttributes)
        return false;
    // Check if input mode detection is supported.
    if (deckLinkAttributes->GetFlag(BMDDeckLinkSupportsInputFormatDetection, &attributeFlag) == S_OK)
        bSupportsFormatDetection = attributeFlag;
    else
        bSupportsFormatDetection = false;
    // Get the supported input connections for the device
    if (deckLinkAttributes->GetInt(BMDDeckLinkVideoInputConnections, &SupportedInputConnections) != S_OK)
        SupportedInputConnections = 0;
    return true;
}
// Enables video + audio input and starts the capture streams.
// NOTE(review): the displayMode parameter and the computed videoInputFlags
// (which would enable format detection) are NOT passed to EnableVideoInput;
// the mode is hard-coded to bmdMode4K2160p50 / 8-bit YUV / default flags.
// Confirm this forced-4K behavior is intentional before "fixing" it.
// @return true if all streams were enabled and started.
bool DeckLinkInputDevice::StartCapture(BMDDisplayMode displayMode, IDeckLinkScreenPreviewCallback* screenPreviewCallback, bool applyDetectedInputMode)
{
    BMDVideoInputFlags videoInputFlags = bmdVideoInputFlagDefault;
    bApplyDetectedInputMode = applyDetectedInputMode;
    // Enable input video mode detection if the device supports it
    if (bSupportsFormatDetection && bApplyDetectedInputMode)
        videoInputFlags |= bmdVideoInputEnableFormatDetection;   // computed but unused below (see note)
    // Set the screen preview
    DeckLinkInput->SetScreenPreviewCallback(screenPreviewCallback);
    // Set capture callback
    DeckLinkInput->SetCallback(this);
    // Set the video input mode
    if (DeckLinkInput->EnableVideoInput(bmdMode4K2160p50, bmdFormat8BitYUV, bmdVideoInputFlagDefault) != S_OK)
        return false;
    if (DeckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, bmdAudioSampleType16bitInteger, AudioChannel) != S_OK)
        return false;
    // Set the capture
    if (DeckLinkInput->StartStreams() != S_OK)
        return false;
    bCurrentlyCapturing = true;
    return true;
}
// Stops the streams, disables video input and detaches both callbacks.
// Safe to call when capture never started (DeckLinkInput may be empty).
void DeckLinkInputDevice::StopCapture()
{
    if (DeckLinkInput)
    {
        // Stop the capture
        DeckLinkInput->StopStreams();
        DeckLinkInput->DisableVideoInput();
        // Delete the callbacks
        DeckLinkInput->SetScreenPreviewCallback(nullptr);
        DeckLinkInput->SetCallback(nullptr);
    }
    bCurrentlyCapturing = false;
}
// Invokes func once for every display mode the device supports on the
// currently selected input connection.
void DeckLinkInputDevice::QuerySupportedVideoModes(DeckLinkDisplayModeQueryFunc func)
{
    ComPtr<IDeckLinkDisplayModeIterator> displayModeIterator;
    ComPtr<IDeckLinkDisplayMode> displayMode;
    if (DeckLinkInput->GetDisplayModeIterator(displayModeIterator.ReleaseAndGetAddressOf()) != S_OK)
        return;
    // Iterate through each supported display mode for the input connection
    while(displayModeIterator->Next(displayMode.ReleaseAndGetAddressOf()) == S_OK)
    {
        dlbool_t supported = false;
        BMDDisplayMode mode = displayMode->GetDisplayMode();
        if ((DeckLinkInput->DoesSupportVideoMode(SelectedInputConnection, mode, bmdFormatUnspecified, bmdNoVideoInputConversion, bmdSupportedVideoModeDefault, NULL, &supported) == S_OK) && supported)
        {
            func(displayMode.Get());
        }
    }
}
// Switches the device's active input connector and remembers the choice
// on success.
HRESULT DeckLinkInputDevice::SetInputVideoConnection(BMDVideoConnection connection)
{
    const HRESULT result = DeckLinkConfig->SetInt(bmdDeckLinkConfigVideoInputConnection, (int64_t)connection);
    if (result == S_OK)
        SelectedInputConnection = connection;
    return result;
}
// Returns whether the wrapped device's current profile is active
// (see IsDeviceActive in the header).
bool DeckLinkInputDevice::IsActive()
{
    return IsDeviceActive(DeckLink);
}
#pragma once
#pragma once
#include <atomic>
#include <functional>
#include <QString>
#include <QObject>
#include <condition_variable>
#include <memory>
#include <mutex>
#include <queue>
#include <thread>
#include "DeckLinkAPI.h"
#include "Utils/CustomEvents.h"
#include "Utils/ComPtr.h"
#include "Utils/Image.h"
#include "Utils/AudioPacket.h"
#include "Threads/CaptureThread.h"
#include "NDI/NDIOutputThread.h"
// QObject wrapper around one DeckLink capture device. Implements the
// SDK's IDeckLinkInputCallback to receive frames/audio and forwards them
// to a CaptureThread and an optional NDIOutputThread.
class DeckLinkInputDevice : public QObject, public IDeckLinkInputCallback
{
    Q_OBJECT
public:
    using DeckLinkDisplayModeQueryFunc = std::function<void(IDeckLinkDisplayMode*)>;
    DeckLinkInputDevice(QObject* parent, ComPtr<IDeckLink>& deckLink, int index);
    virtual ~DeckLinkInputDevice();
    // IUnknown interface
    virtual HRESULT QueryInterface(REFIID riid, LPVOID* ppv) override;
    virtual ULONG AddRef() override;
    virtual ULONG Release() override;
    // IDeckLinkInputCallback interface
    virtual HRESULT VideoInputFormatChanged(BMDVideoInputFormatChangedEvents notificationEvents, IDeckLinkDisplayMode* newDisplayMode, BMDDetectedVideoInputFormatFlags detectedSignalFlags) override;
    virtual HRESULT VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioPacket) override;
    // Other methods
    bool Initialize(void);
    HRESULT GetDeviceName(QString& deviceName);
    bool IsCapturing(void) const { return bCurrentlyCapturing; }
    bool SupportsFormatDetection(void) const { return bSupportsFormatDetection; }
    BMDVideoConnection GetVideoConnections(void) const { return (BMDVideoConnection)SupportedInputConnections; }
    bool IsActive(void);
    bool StartCapture(BMDDisplayMode displayMode, IDeckLinkScreenPreviewCallback* screenPreviewCallback, bool applyDetectedInputMode);
    void StopCapture(void);
    void QuerySupportedVideoModes(DeckLinkDisplayModeQueryFunc func);
    HRESULT SetInputVideoConnection(BMDVideoConnection connection);
    ComPtr<IDeckLink> GetDeckLinkInstance(void) const { return DeckLink; }
    ComPtr<IDeckLinkInput> GetDeckLinkInput(void) const { return DeckLinkInput; }
    ComPtr<IDeckLinkConfiguration> GetDeckLinkConfiguration(void) const { return DeckLinkConfig; }
    // Wires the consumer thread that receives captured frames.
    void SetCapture(const std::shared_ptr<CaptureThread>& capture) {
        Capture = capture;
    }
    // Wires the optional NDI output thread.
    void SetNDIOutputThread(const std::shared_ptr<NDIOutputThread>& ndiOuptutThread) {
        NDIOutput = ndiOuptutThread;
    }
    //void ForwardThread();
signals:
    void ArrivedFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame);
    void PushAudioFrame(std::shared_ptr<AudioPacket> image);
private:
    std::atomic<ULONG> RefCount;        // COM-style reference count
    QObject* Owner;                     // receives custom events (format change, restart errors)
    //
    ComPtr<IDeckLink> DeckLink;
    ComPtr<IDeckLinkInput> DeckLinkInput;
    ComPtr<IDeckLinkConfiguration> DeckLinkConfig;
    //
    bool bSupportsFormatDetection;
    bool bCurrentlyCapturing;
    bool bApplyDetectedInputMode;
    bool bLastValidFrameStatus;
    int64_t SupportedInputConnections;  // bitmask of BMDVideoConnection
    BMDVideoConnection SelectedInputConnection;
    //
    std::shared_ptr<CaptureThread> Capture;      // frame consumer
    std::shared_ptr<NDIOutputThread> NDIOutput;  // optional NDI sink
    //std::queue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue;
    //std::condition_variable cv;
    //std::mutex mutex;
    //std::thread thd;
    int Index;                          // device/page index (logging)
    bool m_pushed;                      // toggles each frame; forwards only when true
    bool PrevInputSignalAbsent;         // previous frame lacked an input signal
    int m_fps;                          // frames received in the current 1s window
    uint64_t m_lastRecvTS;              // start of the current fps window (ms)
};
// Custom event posted to the owner widget when input format auto-detection
// settles on a new display mode.
class DeckLinkInputFormatChangedEvent : public QEvent
{
public:
    DeckLinkInputFormatChangedEvent(BMDDisplayMode displayMode) : QEvent(kVideoFormatChangedEvent), DisplayMode(displayMode){}
    virtual ~DeckLinkInputFormatChangedEvent(){};
    BMDDisplayMode GetDisplayMode() const { return DisplayMode; }
private:
    BMDDisplayMode DisplayMode;
};
// NOTE(review): an anonymous namespace in a header gives every including
// translation unit its own copy of IsDeviceActive; a plain `inline`
// function at namespace scope would be the conventional choice.
namespace
{
    // Returns true when the device's current profile is active
    // (duplex attribute != bmdDuplexInactive); false on any query failure.
    inline bool IsDeviceActive(ComPtr<IDeckLink>& deckLink)
    {
        ComPtr<IDeckLinkProfileAttributes> deckLinkAttributes(IID_IDeckLinkProfileAttributes, deckLink);
        int64_t intAttribute;
        if (!deckLinkAttributes)
            return false;
        if (deckLinkAttributes->GetInt(BMDDeckLinkDuplex, &intAttribute) != S_OK)
            return false;
        return ((BMDDuplexMode)intAttribute) != bmdDuplexInactive;
    }
}
#include <QStandardItemModel>
#include <QStandardItemModel>
#include <QStandardItem>
#include <QToolBox>
#include <QMessageBox>
#include "MomentaMedia.h"
#include "BlackMagicDesign/DeckLinkInputPage.h"
#include "Utils/Platform.h"
// File-local UI constants and helpers.
namespace
{
    // Minimum width for the combo boxes on this page.
    const int kComboMinimumWidth = 185;
    // Connector ids and their human-readable labels, in menu order.
    const std::vector<std::pair<BMDVideoConnection, QString>> kVideoInputConnections = {
        std::make_pair(bmdVideoConnectionSDI, QString("SDI")),
        std::make_pair(bmdVideoConnectionHDMI, QString("HDMI")),
        std::make_pair(bmdVideoConnectionOpticalSDI, QString("Optical SDI")),
        std::make_pair(bmdVideoConnectionComponent, QString("Component")),
        std::make_pair(bmdVideoConnectionComposite, QString("Composite")),
        std::make_pair(bmdVideoConnectionSVideo, QString("S-Video")),
    };
    // Walks up the widget hierarchy and returns the nearest ancestor of
    // type T, or nullptr if none exists.
    template<class T>
    T* findParent(QWidget* widget)
    {
        T* result = nullptr;
        do
        {
            widget = widget->parentWidget();
            result = qobject_cast<T*>(widget);
        } while (widget && !result);
        return result;
    }
}
// Builds the page UI (device/connection/format combos, auto-detect box,
// NDI label, GL preview), creates the NDI output and capture worker
// threads, and wires the UI signals to their slots.
DeckLinkInputPage::DeckLinkInputPage() : SelectedDevice(nullptr), NDIOutput(nullptr), Index(0)
{
    FormLayout = new QFormLayout(this);
    DeviceListCombo = new QComboBox();
    DeviceListCombo->setMinimumWidth(kComboMinimumWidth);
    FormLayout->addRow("Input Device:", DeviceListCombo);
    DeviceListCombo->addItem("None");   // index 0 is the "no device" entry
    InputConnectionCombo = new QComboBox();
    InputConnectionCombo->setMinimumWidth(kComboMinimumWidth);
    InputConnectionCombo->setEnabled(false);
    FormLayout->addRow("Input Connection:", InputConnectionCombo);
    VideoFormatCombo = new QComboBox();
    VideoFormatCombo->setMinimumWidth(kComboMinimumWidth);
    VideoFormatCombo->setEnabled(false);
    FormLayout->addRow("Video Format:", VideoFormatCombo);
    AutoDetectCheckBox = new QCheckBox();
    AutoDetectCheckBox->setEnabled(false);
    FormLayout->addRow("Auto-Detect Format:", AutoDetectCheckBox);
    NDINameLabel = new QLabel();
    FormLayout->addRow("NDIOutputLabel:", NDINameLabel);
    PreviewView = new DeckLinkOpenGLWidget(dynamic_cast<QWidget*>(this));
    PreviewView->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);
    // Placeholder NDI thread; ObjectNameChanged() replaces it with one
    // named after the page.
    NDIOutput = std::make_unique<NDIOutputThread>("NDIOutput", 1920, 1080);
    Capture = std::make_shared<CaptureThread>();
    connect(DeviceListCombo, QOverload<int>::of(&QComboBox::currentIndexChanged), this, &DeckLinkInputPage::InputDeviceChanged);
    connect(InputConnectionCombo, QOverload<int>::of(&QComboBox::currentIndexChanged), this, &DeckLinkInputPage::InputConnectionChanged);
    connect(VideoFormatCombo, QOverload<int>::of(&QComboBox::currentIndexChanged), this, &DeckLinkInputPage::VideoFormatChanged);
    connect(AutoDetectCheckBox, &QCheckBox::stateChanged, this, &DeckLinkInputPage::AutoDetectChanged);
    connect(this, &QObject::objectNameChanged, this, &DeckLinkInputPage::ObjectNameChanged);
}
// Stops capture and the worker threads before the page is destroyed.
// BUGFIX: SelectedDevice starts null and stays null if no device was ever
// granted; the original dereferenced it (and NDIOutput/Capture)
// unconditionally. All three are now guarded, and the layout is deleted
// last instead of before the members that the teardown code still uses.
DeckLinkInputPage::~DeckLinkInputPage()
{
    if (SelectedDevice)
        SelectedDevice->StopCapture();
    if (NDIOutput)
        NDIOutput->exit();
    if (Capture)
        Capture->exit();
    delete FormLayout;
}
// Resizes and clears the preview widget, and records this page's index.
void DeckLinkInputPage::SetPreviewSize(QSize previewSize, int index)
{
    Index = index;
    PreviewView->resize(previewSize);
    PreviewView->Clear();
}
// Handles the custom events posted by DeckLinkInputDevice: a detected
// format change updates the UI; a capture-restart error is ignored.
void DeckLinkInputPage::customEvent(QEvent* event)
{
    const QEvent::Type eventType = event->type();
    if (eventType == kVideoFormatChangedEvent)
    {
        auto* formatEvent = dynamic_cast<DeckLinkInputFormatChangedEvent*>(event);
        DetectedVideoFormatChanged(formatEvent->GetDisplayMode());
    }
    else if (eventType == kErrorRestartingCaptureEvent)
    {
        event->ignore();
    }
}
// Starts device capture with the format currently selected in the UI,
// then starts the NDI output and capture worker threads.
// No-op when no device is selected.
void DeckLinkInputPage::StartCapture()
{
    if (!SelectedDevice)
        return;
    BMDDisplayMode displayMode = bmdModeUnknown;
    bool applyDetectedInputMode = AutoDetectCheckBox->isChecked();
    // The combo stores the BMDDisplayMode value as item data.
    displayMode = (BMDDisplayMode)VideoFormatCombo->currentData().value<unsigned int>();
    SelectedDevice->StartCapture(displayMode, PreviewView->GetDelegate(), applyDetectedInputMode);
    // Start NDI
    NDIOutput->start();
    // Start Capture
    Capture->start();
}
// Adds a newly discovered DeckLink device to the device combo (keyed by
// the raw IDeckLink pointer) and, if this page has no device yet, asks the
// parent for it.
void DeckLinkInputPage::AddDevice(ComPtr<IDeckLink>& deckLink, bool deviceIsActive)
{
    dlstring_t deviceNameStr;
    QString deviceName;
    if (deckLink->GetDisplayName(&deviceNameStr) == S_OK)
    {
        deviceName = DlToQString(deviceNameStr);
        DeleteString(deviceNameStr);
    }
    else
        return;
    // Only add each physical device once.
    int index = DeviceListCombo->findData(QVariant::fromValue((void*)deckLink.Get()));
    if(index == -1)
    {
        DeviceListCombo->addItem(deviceName, QVariant::fromValue((void*)deckLink.Get()));
        EnableDevice(deckLink, deviceIsActive);
    }
    if(!SelectedDevice && deviceIsActive)
    {
        // Request deckLink object from parent widget if it`s not already in use by another page
        emit RequestDeckLinkIfAvailable(deckLink);
    }
}
// Removes a disconnected device from the combo; if it was the selected
// one, falls back to the "None" entry (index 0).
void DeckLinkInputPage::RemoveDevice(ComPtr<IDeckLink>& deckLink)
{
    // Find the combo box entry and remove entry
    int indexToRemove = DeviceListCombo->findData(QVariant::fromValue((void*)deckLink.Get()));
    bool removingCurrentDevice = false;
    // > 0 (not >= 0): index 0 is the permanent "None" entry.
    if(indexToRemove > 0)
    {
        removingCurrentDevice = (indexToRemove == DeviceListCombo->currentIndex());
        // Prevent signal on removeItem, so that we don`t refresh connector/video mode for removed device
        bool blocked = DeviceListCombo->blockSignals(true);
        DeviceListCombo->removeItem(indexToRemove);
        DeviceListCombo->blockSignals(blocked);
    }
    if (removingCurrentDevice)
        DeviceListCombo->setCurrentIndex(0);
}
// Enables/disables a device's entry in the combo box (greys it out when
// inactive) and refreshes the page state.
void DeckLinkInputPage::EnableDevice(ComPtr<IDeckLink>& deckLink, bool enable)
{
    if (deckLink == nullptr)
        return;
    int index = DeviceListCombo->findData(QVariant::fromValue((void*)deckLink.Get()));
    if (index >= 0)
    {
        // Toggle Qt::ItemIsEnabled on the underlying model item.
        QStandardItemModel* model = qobject_cast<QStandardItemModel*>(DeviceListCombo->model());
        QStandardItem* item = model->item(index);
        item->setFlags(enable ? item->flags() | Qt::ItemIsEnabled : item->flags() & ~Qt::ItemIsEnabled);
    }
    SelectedDeviceChanged();
}
// If the given device is the one this page currently uses, stops and
// releases it and resets the combo to "None".
// @return true if the device was held by this page.
bool DeckLinkInputPage::ReleaseDeviceIfSelected(ComPtr<IDeckLink>& deckLink)
{
    if((SelectedDevice.Get() != nullptr) && (SelectedDevice->GetDeckLinkInstance().Get() == deckLink.Get()))
    {
        // Device is selected, stop and release it
        SelectedDevice->StopCapture();
        SelectedDevice = nullptr;
        DeviceListCombo->setCurrentIndex(0);
        return true;
    }
    return false;
}
// Slot: the user picked a different entry in the device combo. Releases
// the current device (returning it to the parent) and requests the newly
// selected one; selecting "None" just refreshes the UI.
void DeckLinkInputPage::InputDeviceChanged(int selectedDeviceIndex)
{
    START_SLOT_TIME_COUNTER
    if (selectedDeviceIndex == -1)
        return;
    if(SelectedDevice)
    {
        ComPtr<IDeckLink> existingDevice = SelectedDevice->GetDeckLinkInstance();
        // Stop and release existing selected device
        SelectedDevice->StopCapture();
        SelectedDevice = nullptr;
        // Notify parent widget that device is available
        emit RelinquishDeckLink(existingDevice);
    }
    QVariant selectedDeviceVariant = DeviceListCombo->itemData(selectedDeviceIndex);
    ComPtr<IDeckLink> deckLink((IDeckLink*)selectedDeviceVariant.value<void*>());
    if(deckLink)
    {
        // Request deckLink object from parent widget
        emit RequestDeckLink(deckLink);
    }
    else
    {
        // Update UI since "None" was selected
        SelectedDeviceChanged();
    }
    END_SLOT_TIME_COUNTER
}
// Slot: the parent granted the requested IDeckLink device. Wraps it,
// wires the capture/NDI threads and starts capturing.
// BUGFIX: SetCapture/SetNDIOutputThread were invoked via
// SelectedDevice.Get()-> unconditionally, even though a null SelectedDevice
// is explicitly guarded two lines earlier; all device calls are now inside
// the guard. StartCapture() already no-ops on a null device.
void DeckLinkInputPage::RequestedDeviceGranted(ComPtr<IDeckLink>& device)
{
    START_SLOT_TIME_COUNTER
    SelectedDevice = MakeComPtr<DeckLinkInputDevice>(this, device, Index);
    // Register profile callback with newly selected device`s profile manager
    if (SelectedDevice)
    {
        SelectedDevice->Initialize();
        SelectedDevice->SetCapture(Capture);
        SelectedDevice->SetNDIOutputThread(NDIOutput);
    }
    SelectedDeviceChanged();
    StartCapture();
    END_SLOT_TIME_COUNTER
}
// Slot: the user picked a different input connector. Applies it to the
// device and refreshes the video-format menu.
// NOTE(review): SelectedDevice is dereferenced without a null check here;
// the combo is only enabled while a device is selected, but confirm that
// invariant holds on all code paths.
void DeckLinkInputPage::InputConnectionChanged(int selectedConnectionIndex)
{
    START_SLOT_TIME_COUNTER
    HRESULT result;
    if (selectedConnectionIndex == -1)
        return;
    QVariant selectedConnectionVariant = InputConnectionCombo->itemData(selectedConnectionIndex);
    result = SelectedDevice->SetInputVideoConnection((BMDVideoConnection)selectedConnectionVariant.value<int64_t>());
    if(result != S_OK)
    {
        QMessageBox::critical(this, "Input connection error", "Unable to set video input connector");
        return;
    }
    // Update the video mode popup menu
    RefreshDisplayModeMenu();
    END_SLOT_TIME_COUNTER
}
void DeckLinkInputPage::VideoFormatChanged(int selectedVideoFormatIndex)
{
START_SLOT_TIME_COUNTER
if (!AutoDetectCheckBox->isChecked())
RestartCapture();
END_SLOT_TIME_COUNTER
}
void DeckLinkInputPage::AutoDetectChanged(int autoDetectState)
{
START_SLOT_TIME_COUNTER
VideoFormatCombo->setEnabled(static_cast<Qt::CheckState>(autoDetectState) != Qt::Checked);
RestartCapture();
END_SLOT_TIME_COUNTER
}
// Reflects an auto-detected display mode in the format combo and
// notifies listeners.
void DeckLinkInputPage::DetectedVideoFormatChanged(BMDDisplayMode displayMode)
{
    const QVariant target = QVariant::fromValue((uint64_t)displayMode);
    const int comboIndex = VideoFormatCombo->findData(target);
    if (comboIndex >= 0)
        VideoFormatCombo->setCurrentIndex(comboIndex);
    emit FormatChanged(displayMode);
}
// Slot: the page's objectName changed. Derives an NDI sender name from the
// name's last character and recreates the NDI output thread.
// NOTE(review): this discards the NDIOutput instance created in the
// constructor; any DeckLinkInputDevice already wired via
// SetNDIOutputThread keeps the OLD thread -- confirm this ordering is
// always ctor -> ObjectNameChanged -> RequestedDeviceGranted.
void DeckLinkInputPage::ObjectNameChanged(const QString& newName)
{
    START_SLOT_TIME_COUNTER
    NDINameLabel->setText(QString("NDIOutput") + newName.at(newName.size() - 1));
    NDIOutput = std::make_shared<NDIOutputThread>(NDINameLabel->text(), 1920, 1080);
    //SelectedDevice.Get()->SetNDIOutputThread(NDIOutput);
    //connect(Capture.get(), SIGNAL(PushFrame(std::shared_ptr<Image>)), NDIOutput.get(), SLOT(AddFrame(std::shared_ptr<Image>)));
    //connect(SelectedDevice.Get(), SIGNAL(ArrivedFrame(ComPtr<IDeckLinkVideoInputFrame>)), NDIOutput.get(), SLOT(AddFrame(ComPtr<IDeckLinkVideoInputFrame>)));
    END_SLOT_TIME_COUNTER
}
// Stops and restarts capture on the selected device; no-op without one.
void DeckLinkInputPage::RestartCapture()
{
    if (SelectedDevice)
    {
        SelectedDevice->StopCapture();
        StartCapture();
    }
}
// Synchronizes the whole page (combo selection, auto-detect checkbox,
// connector menu, toolbox title, preview overlay) with SelectedDevice.
// NOTE(review): findParent<QToolBox> can return nullptr if the page is not
// (yet) inside a QToolBox; toolBox is dereferenced unchecked -- confirm.
void DeckLinkInputPage::SelectedDeviceChanged()
{
    int indexToSelect = 0;   // defaults to the "None" entry
    bool active = true;
    if(SelectedDevice)
    {
        indexToSelect = DeviceListCombo->findData(QVariant::fromValue((void*)SelectedDevice->GetDeckLinkInstance().Get()));
        AutoDetectCheckBox->setEnabled(SelectedDevice->SupportsFormatDetection());
        AutoDetectCheckBox->setChecked(SelectedDevice->SupportsFormatDetection());
        active = SelectedDevice->IsActive();
    }
    else
    {
        PreviewView->Clear();
        AutoDetectCheckBox->setEnabled(false);
    }
    // Select the item in the combo box, but we don`t want to trigger any further processing
    bool blocked = DeviceListCombo->blockSignals(true);
    DeviceListCombo->setCurrentIndex(indexToSelect);
    DeviceListCombo->blockSignals(blocked);
    // Update the input connector popup menu which will in turn update the video format popup menu
    RefreshInputConnectionMenu();
    // Update the toolbox title and the overlay
    QToolBox* toolBox = findParent<QToolBox>(this);
    int pageIndex = toolBox->indexOf(this);
    QString title = QString("Input %1: %2%3").arg(pageIndex + 1).arg(DeviceListCombo->itemText(indexToSelect)).arg(active ? "" : " [inactive]");
    toolBox->setItemText(pageIndex, title);
    PreviewView->GetOverlay()->SetDeviceLabel(title);
}
// Rebuilds the input-connector combo from the device's supported
// connections and pre-selects the device's current connector.
void DeckLinkInputPage::RefreshInputConnectionMenu()
{
    BMDVideoConnection supportedConnections;
    int64_t currentInputConnection;
    if(!SelectedDevice)
    {
        InputConnectionCombo->setEnabled(false);
        return;
    }
    // Get the available input video connections for the device
    supportedConnections = SelectedDevice->GetVideoConnections();
    // Get the current selected input connection
    if(SelectedDevice->GetDeckLinkConfiguration()->GetInt(bmdDeckLinkConfigVideoInputConnection, &currentInputConnection) != S_OK)
    {
        currentInputConnection = bmdVideoConnectionUnspecified;
    }
    InputConnectionCombo->clear();
    for(auto& inputConnection : kVideoInputConnections)
    {
        if (inputConnection.first & supportedConnections)
            InputConnectionCombo->addItem(inputConnection.second, QVariant::fromValue((int64_t)inputConnection.first));
        // Select the entry just appended when it matches the active connector.
        if (inputConnection.first == (BMDVideoConnection)currentInputConnection)
            InputConnectionCombo->setCurrentIndex(InputConnectionCombo->count() - 1);
    }
    InputConnectionCombo->setEnabled(true);
}
// Rebuilds the video-format combo from the display modes the selected
// device supports on its current connector.
// NOTE(review): on the no-device path this disables InputConnectionCombo;
// VideoFormatCombo looks like the intended target -- confirm.
void DeckLinkInputPage::RefreshDisplayModeMenu()
{
    VideoFormatCombo->clear();
    if (!SelectedDevice)
    {
        InputConnectionCombo->setEnabled(false);
        return;
    }
    // Populate the display mode menu with a list of display modes supported by the installed DeckLink card
    SelectedDevice->QuerySupportedVideoModes([this](IDeckLinkDisplayMode* displayMode)
    {
        dlstring_t modeName;
        BMDDisplayMode mode = displayMode->GetDisplayMode();
        if(displayMode->GetName(&modeName) == S_OK)
        {
            VideoFormatCombo->addItem(DlToQString(modeName), QVariant::fromValue((uint64_t)mode));
            DeleteString(modeName);
        }
    });
    VideoFormatCombo->setCurrentIndex(0);
    InputConnectionCombo->setEnabled(true);
}
#pragma once
#pragma once
#include <QCheckBox>
#include <QComboBox>
#include <QFormLayout>
#include <QLabel>
#include <functional>
#include "DeckLinkInputDevice.h"
#include "DeckLinkOpenGLWidget.h"
#include "Utils/ComPtr.h"
#include "NDI/NDIOutputThread.h"
#include "Threads/CaptureThread.h"
// One toolbox page managing a single DeckLink input: device/connector/
// format selection UI, an OpenGL preview, a CaptureThread for frame
// conversion and an NDIOutputThread for NDI streaming.
class DeckLinkInputPage : public QWidget
{
    Q_OBJECT
public:
    DeckLinkInputPage();
    virtual ~DeckLinkInputPage();
    void SetPreviewSize(QSize previewSize, int index);
    // Receives custom events posted by DeckLinkInputDevice.
    void customEvent(QEvent* event) override;
    void StartCapture(void);
    // Device discovery hooks (called by the parent window).
    void AddDevice(ComPtr<IDeckLink>& deckLink, bool deviceIsActive);
    void RemoveDevice(ComPtr<IDeckLink>& deckLink);
    void EnableDevice(ComPtr<IDeckLink>& deckLink, bool enable);
    bool ReleaseDeviceIfSelected(ComPtr<IDeckLink>& deckLink);
    DeckLinkOpenGLWidget* GetPreviewView(void) const { return PreviewView; }
    ComPtr<DeckLinkInputDevice> GetSelectedDevice(void) const { return SelectedDevice; }
    CaptureThread* GetCapture() { return Capture.get(); }
public slots:
    void InputDeviceChanged(int selectedDeviceIndex);
    void InputConnectionChanged(int selectedConnectionIndex);
    void VideoFormatChanged(int selectedVideoFormatIndex);
    void AutoDetectChanged(int autoDetectState);
    void RequestedDeviceGranted(ComPtr<IDeckLink>& device);
signals:
    // Device-arbitration protocol with the parent window.
    void RequestDeckLink(ComPtr<IDeckLink>& device);
    void RequestDeckLinkIfAvailable(ComPtr<IDeckLink>& device);
    void RelinquishDeckLink(ComPtr<IDeckLink>& device);
    void FormatChanged(BMDDisplayMode displayMode);
private slots:
    void ObjectNameChanged(const QString& newName);
private:
    void RestartCapture(void);
    void DetectedVideoFormatChanged(BMDDisplayMode displayMode);
    void SelectedDeviceChanged(void);
    void RefreshInputConnectionMenu(void);
    void RefreshDisplayModeMenu(void);
    ComPtr<DeckLinkInputDevice> SelectedDevice;   // null while no device granted
    DeckLinkOpenGLWidget* PreviewView;
    std::shared_ptr<CaptureThread> Capture;       // frame conversion worker
    std::shared_ptr<NDIOutputThread> NDIOutput;   // NDI sender worker
    QFormLayout* FormLayout;
    QComboBox* DeviceListCombo;                   // index 0 is "None"
    QComboBox* InputConnectionCombo;
    QComboBox* VideoFormatCombo;
    QCheckBox* AutoDetectCheckBox;
    QLabel* NDINameLabel;
    int Index;                                    // page index (from SetPreviewSize)
};
\ No newline at end of file
#include "BlackMagicDesign/DeckLinkOutputDevice.h"
#include "BlackMagicDesign/DeckLinkOutputDevice.h"
#include <QDebug>
#include <stdexcept>
#include <map>
#include "BlackMagicDesign/ScreenPreviewCallback.h"
#include "BlackMagicDesign/ReferenceTime.h"
extern int OutputDeleyTime;
extern int OutputPlayMode;
extern int AudioChannel;
extern std::map<qint32, qint32> map_output_delay;
#define OUTPUT_1080 1
#define AUDIOMAXSIZE (OutputDeleyTime + 3) * 50
// Wraps one DeckLink output (playback) device.
// NOTE(review): the 'videoPrerollSize' parameter is ignored — the member is
// hard-coded to 1. Preroll appears unused since output is done synchronously
// (DisplayVideoFrameSync); confirm before relying on the parameter.
DeckLinkOutputDevice::DeckLinkOutputDevice(ComPtr<IDeckLink>& decklink, int videoPrerollSize,int index)
	: RefCount(1),
	state(PlaybackState::Idle),
	deckLink(decklink),
	deckLinkOutput(IID_IDeckLinkOutput, decklink),
	videoPrerollSize(1),
	seenFirstVideoFrame(false),
	startPlaybackTime(0),
	scheduledFrameCompletedCallback(nullptr),
	first_sleep(false),
	Index(index),
	m_fps(0),
	m_lastRecvTS(TimeMilliSecond())
{
	// Check that device has an output interface, this will throw an error if using a capture-only device such as DeckLink Mini Recorder
	if (!deckLinkOutput)
		throw std::runtime_error("DeckLink device does not have an output interface.");
	//current_sleep_ms = OutputDeleyTime * 1000;
	// Derive per-device delay/queue settings from the device's display name.
	InitResource();
}
// IUnknown methods
// IUnknown: hand out IUnknown or IDeckLinkVideoOutputCallback, AddRef'ing on
// success; any other IID yields E_NOINTERFACE with *ppv cleared.
HRESULT DeckLinkOutputDevice::QueryInterface(REFIID iid, LPVOID* ppv)
{
	if (ppv == nullptr)
		return E_INVALIDARG;

	if (iid == IID_IUnknown)
	{
		*ppv = this;
		AddRef();
		return S_OK;
	}

	if (iid == IID_IDeckLinkVideoOutputCallback)
	{
		*ppv = static_cast<IDeckLinkVideoOutputCallback*>(this);
		AddRef();
		return S_OK;
	}

	*ppv = nullptr;
	return E_NOINTERFACE;
}
// IUnknown: atomically bump the reference count and return the new value.
ULONG DeckLinkOutputDevice::AddRef()
{
	return RefCount.fetch_add(1) + 1;
}
// IUnknown: atomically drop one reference; self-destruct when it hits zero.
ULONG DeckLinkOutputDevice::Release()
{
	const ULONG remaining = RefCount.fetch_sub(1) - 1;
	if (remaining == 0)
		delete this;
	return remaining;
}
// IDeckLinkVideoOutputCallback interface
// IDeckLinkVideoOutputCallback: invoked by the driver when a scheduled frame
// finishes. Scheduled playback is currently disabled in favour of synchronous
// output (scheduleVideoFramesFunc uses DisplayVideoFrameSync), so this is a
// no-op; the original frame-tracking logic is kept below for reference.
// FIX: removed the live local 'frameCompletionTimestamp' — it was referenced
// only by the commented-out code and produced an unused-variable warning.
HRESULT DeckLinkOutputDevice::ScheduledFrameCompleted(IDeckLinkVideoFrame* completedFrame, BMDOutputFrameCompletionResult result)
{
	//BMDTimeValue frameCompletionTimestamp;
	//if(completedFrame)
	//{
	//	// Get the time that scheduled frame was completely transmitted by the device
	//	if(deckLinkOutput->GetFrameCompletionReferenceTimestamp(completedFrame, ReferenceTime::kTimescale, &frameCompletionTimestamp) == S_OK)
	//	{
	//		std::lock_guard<std::mutex> locker(mutex);
	//		for(auto iter = scheduledFramesList.rbegin(); iter != scheduledFramesList.rend(); iter++)
	//		{
	//			auto videoFrame = iter->Get();
	//			if(videoFrame == completedFrame)
	//			{
	//				if(scheduledFrameCompletedCallback != nullptr)
	//				{
	//					videoFrame->setOutputCompletionResult(result);
	//					videoFrame->setOutputFrameCompletedReferenceTime(frameCompletionTimestamp - videoFrame->getVideoFrameDuration());
	//					scheduledFrameCompletedCallback(std::move(*iter));
	//				}
	//				// Erase item from reverse_iterator
	//				scheduledFramesList.erase(std::next(iter).base());
	//				break;
	//			}
	//		}
	//	}
	//}
	return S_OK;
}
// IDeckLinkVideoOutputCallback: driver notification that scheduled playback
// has stopped. Records the state; the mutex/condvar signalling that used to
// accompany this is currently disabled.
HRESULT DeckLinkOutputDevice::ScheduledPlaybackHasStopped()
{
	state = PlaybackState::Stopped;
	//playbackStoppedCondition.notify_one();
	return S_OK;
}
// Configure the device for output and spin up the video/audio scheduling
// threads. Returns false (leaving the device partially configured) on any
// SDK failure. The requested 'displayMode' may be overridden by the global
// OutputPlayMode / USE_4K settings below.
bool DeckLinkOutputDevice::StartPlayback(BMDDisplayMode displayMode, bool enable3D, BMDPixelFormat pixelFormat, bool requireReferenceLocked, IDeckLinkScreenPreviewCallback* screenPreviewCallback)
{
	BMDDisplayMode outputDisplayMode;
#if OUTPUT_1080
	// OutputPlayMode == 1 forces 1080i50; otherwise honour the caller's mode.
	switch (OutputPlayMode)
	{
	case 1:
		outputDisplayMode = bmdModeHD1080i50;
		break;
	default:
		outputDisplayMode = displayMode;
		break;
	}
#else
	outputDisplayMode = BMDDisplayMode::bmdModeHD720p50;
#endif
#if USE_4K
	// 4K builds downscale to 1080p25 on output.
	outputDisplayMode = bmdModeHD1080p25;
#endif
	// Pass through RP188 timecode and VANC from input frame. VITC timecode is forwarded with VANC
	// NOTE(review): outputFlags is computed here (and extended for 3D below)
	// but EnableVideoOutput is called with bmdVideoOutputFlagDefault instead —
	// confirm whether RP188/VANC passthrough was meant to be enabled.
	BMDVideoOutputFlags outputFlags = (BMDVideoOutputFlags)(bmdVideoOutputRP188 | bmdVideoOutputVANC);
	ComPtr<IDeckLinkDisplayMode> deckLinkDisplayMode;
	dlbool_t displayModeSupported;
	BMDSupportedVideoModeFlags supportedVideoModeFlags = enable3D ? bmdSupportedVideoModeDualStream3D : bmdSupportedVideoModeDefault;
	seenFirstVideoFrame = false;
	// NOTE(review): assigns 'false' (i.e. 0) to a BMDTimeValue; presumably '0'
	// was intended.
	startPlaybackTime = false;
	{
		//std::lock_guard<std::mutex> locker(mutex);
		state = PlaybackState::Starting;
	}
	// Verify the device supports the chosen mode/pixel-format combination.
	if((deckLinkOutput->DoesSupportVideoMode(bmdVideoConnectionUnspecified, outputDisplayMode, pixelFormat, bmdNoVideoOutputConversion, supportedVideoModeFlags, nullptr, &displayModeSupported) != S_OK) || !displayModeSupported)
	{
		qDebug() << "call DoesSupportVideoMode fuc is failure" << "\n";
		return false;
	}
	if (deckLinkOutput->GetDisplayMode(outputDisplayMode, deckLinkDisplayMode.ReleaseAndGetAddressOf()) != S_OK)
	{
		qDebug() << "call GetDisplayMode fuc is failure" << "\n";
		return false;
	}
	// Cache the frame rate (duration/timescale) for scheduling.
	if (deckLinkDisplayMode->GetFrameRate(&frameDuration, &frameTimescale) != S_OK)
	{
		qDebug() << "call GetFrameRate fuc is failure" << "\n";
		return false;
	}
	if (enable3D)
		outputFlags = (BMDVideoOutputFlags)(outputFlags | bmdVideoOutputDualStream3D);
	// Reference DeckLinkOutputDevice delegate callbacks
	if (deckLinkOutput->SetScheduledFrameCompletionCallback(this) != S_OK)
	{
		qDebug() << "call SetScheduledFrameCompletionCallback fuc is failure" << "\n";
		return false;
	}
	if (deckLinkOutput->EnableVideoOutput(outputDisplayMode, bmdVideoOutputFlagDefault) != S_OK)
	{
		qDebug() << "call EnableVideoOutput fuc is failure" << "\n";
		return false;
	}
	// 48 kHz / 16-bit audio, channel count from the global AudioChannel.
	if (deckLinkOutput->EnableAudioOutput(bmdAudioSampleRate48kHz,bmdAudioSampleType16bitInteger, AudioChannel,bmdAudioOutputStreamTimestamped) != S_OK)
	{
		qDebug() << "call EnableAudioOutput fuc is failure" << "\n";
		return false;
	}
	deckLinkOutput->SetScreenPreviewCallback(screenPreviewCallback);
	// NOTE(review): waitForReferenceSignalToLock() currently always returns
	// false (its polling loop is commented out), so passing
	// requireReferenceLocked == true makes this function always fail.
	if (requireReferenceLocked)
	{
		if (!waitForReferenceSignalToLock())
			return false;
	}
	// Start fresh queues and the two worker threads (joined in StopPlayback).
	outputVideoFrameQueue.Reset();
	outputAudioFrameQueue.Reset();
	scheduleVideoFramesThread = std::thread(&DeckLinkOutputDevice::scheduleVideoFramesFunc, this);
	scheduleAudioFramesThread = std::thread(&DeckLinkOutputDevice::scheduleAudioFramesFuncDeley, this);
	{
		//std::lock_guard<std::mutex> locker(mutex);
		state = PlaybackState::Prerolling;
	}
	return true;
}
// Stop playback: terminate the scheduling threads, stop any scheduled
// playback on the device, disable video/audio outputs and detach callbacks.
// Safe to call when playback is not running (state checks below).
void DeckLinkOutputDevice::StopPlayback()
{
	PlaybackState currentState;
	dlbool_t scheduledPlaybackRunning = false;
	{
		//std::lock_guard<std::mutex> locker(mutex);
		currentState = state;
	}
	// Cancel scheduling thread
	if((currentState == PlaybackState::Starting) || (currentState == PlaybackState::Prerolling) || (currentState == PlaybackState::Running))
	{
		// Terminate scheduling threads
		{
			// signal cancel flag to terminate wait condition
			//std::lock_guard<std::mutex> locker(mutex);
			state = PlaybackState::Stopping;
		}
		// Wake both worker threads out of their blocking WaitFor() calls,
		// then join them before touching the device.
		outputVideoFrameQueue.CancelWaiters();
		outputAudioFrameQueue.CancelWaiters();
		if (scheduleVideoFramesThread.joinable())
			scheduleVideoFramesThread.join();
		if (scheduleAudioFramesThread.joinable())
			scheduleAudioFramesThread.join();
	}
	// In scheduled playback is running, stop video and audio streams immediately
	if((deckLinkOutput->IsScheduledPlaybackRunning(&scheduledPlaybackRunning) == S_OK) && scheduledPlaybackRunning)
	{
		deckLinkOutput->StopScheduledPlayback(0, nullptr, 0);
		{
			// Wait for scheduled playback to complete
			//std::unique_lock<std::mutex> locker(mutex);
			//playbackStoppedCondition.wait(locker, [this] {return state == PlaybackState::Stopped; });
		}
	}
	// Disable video
	deckLinkOutput->DisableVideoOutput();
	// Disable audio
	deckLinkOutput->DisableAudioOutput();
	// Dereference DeckLinkOutputDevice delegate from callbacks
	deckLinkOutput->SetScheduledFrameCompletionCallback(nullptr);
	{
		//std::lock_guard<std::mutex> locker(mutex);
		//scheduledFramesList.clear();
		state = PlaybackState::Idle;
	}
}
// Abort waiting for a reference signal: if still in the Starting state, skip
// straight to Prerolling.
void DeckLinkOutputDevice::CancelWaitForReference()
{
	if (state == PlaybackState::Starting)
		state = PlaybackState::Prerolling;
}
// Query whether a reference signal is locked. Returns true when locked, with
// *mode set to the detected display mode (bmdModeUnknown if the mode query
// fails); returns false when unlocked, the status interface is unavailable,
// or 'mode' is null.
bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
{
	if (mode == nullptr)
		return false;

	*mode = bmdModeUnknown;

	ComPtr<IDeckLinkStatus> status(IID_IDeckLinkStatus, deckLink);
	if (!status)
		return false;

	dlbool_t locked = false;
	if ((status->GetFlag(bmdDeckLinkStatusReferenceSignalLocked, &locked) != S_OK) || !locked)
		return false;

	int64_t rawMode = 0;
	if (status->GetInt(bmdDeckLinkStatusReferenceSignalMode, &rawMode) == S_OK)
		*mode = static_cast<BMDDisplayMode>(rawMode);
	return true;
}
// True while playback resources are in use (any state other than Idle).
bool DeckLinkOutputDevice::isPlaybackActive()
{
	return !(state == PlaybackState::Idle);
}
// Placeholder thread body (no call site visible in this file).
// FIX: the original empty 'while (true) {}' busy-spun a full CPU core if ever
// run — and a side-effect-free infinite loop is undefined behaviour in C++ —
// so the loop now sleeps between iterations.
void DeckLinkOutputDevice::outputAudioFrameFunc(void)
{
	while (true)
	{
		std::this_thread::sleep_for(std::chrono::milliseconds(10));
	}
}
// Video worker thread: pops images from outputVideoFrameQueue, delays each
// frame so it plays out 'output_deley_ms' after its capture timestamp, then
// displays it synchronously. Exits when WaitFor() returns false (queue
// cancelled by StopPlayback). Also logs the achieved fps once per second.
void DeckLinkOutputDevice::scheduleVideoFramesFunc()
{
	qDebug() << "start scheduleVideoFramesFunc function........" << "\n";
	while(true)
	{
		START_WAIT_TIME_COUNTER
		std::shared_ptr<Image> outputImage;
		if(outputVideoFrameQueue.WaitFor(outputImage))
		{
			// How long ago this frame was captured, in ms.
			auto now_time = QDateTime::currentMSecsSinceEpoch();
			auto dever_time = now_time - outputImage->getInputFrameCurTimeStamp();
			//qDebug() << "index:"<<Index << "input frame cur time:" << outputImage->getInputFrameCurTimeStamp() << " now time:" << now_time << " dever time:" << dever_time << "\n";
			// Sleep until the configured output delay has elapsed.
			if (dever_time < output_deley_ms)
			{
				current_sleep_ms = output_deley_ms - dever_time;
				std::this_thread::sleep_for(std::chrono::milliseconds(current_sleep_ms));
			}
			END_WAIT_TIME_COUNTER
			//std::lock_guard<std::mutex> locker(mutex);
			// Published so the audio thread can sync against video time.
			current_video_time = outputImage->getInputFrameCurTimeStamp();
			// Zero-copy wrap of the image as a DeckLink video frame.
			ComPtr<DeckLinkOutputVideoFrame> videoFrame = MakeComPtr<DeckLinkOutputVideoFrame>(outputImage);
			// Record the stream time of the first frame, so we can start playing from the point
			//if(!seenFirstVideoFrame)
			//{
			//	startPlaybackTime = std::max(startPlaybackTime, videoFrame->getVideoStreamTime());
			//	seenFirstVideoFrame = true;
			//}
			// Get the reference time when video frame was scheduled
			//videoFrame->setOutputFrameScheduledReferenceTime(ReferenceTime::getSteadyClockUptimeCOunt());
			//if(deckLinkOutput->ScheduleVideoFrame(outputFrame.Get(), outputFrame->getVideoStreamTime(), frameDuration, frameTimescale) != S_OK)
			// Synchronous display (scheduled playback is disabled).
			auto ret = deckLinkOutput->DisplayVideoFrameSync(videoFrame.Get());
			if (ret != S_OK)
			{
				// Map the documented failure codes to log messages.
				if (ret == E_ACCESSDENIED) {
					qDebug() << "The video output is not enabled." << "\n";
				}
				else if (ret == E_INVALIDARG)
				{
					qDebug() << "The frame attributes are invalid." << "\n";
				}
				else if (ret == E_FAIL)
				{
					qDebug() << "Failure." << "\n";
				}
				else
				{
					qDebug() << "other error code." << "\n";
				}
			}
			else
			{
				m_fps++;
			}
			// Once-per-second fps / queue-depth log.
			uint64_t currTime, deltaTime;
			int qsize;
			qsize = outputVideoFrameQueue.Size();
			currTime = TimeMilliSecond();
			deltaTime = currTime - m_lastRecvTS;
			if (deltaTime >= 1000)
			{
				qDebug() << GetCurrDateTimeStr() << " decklink output fps " << m_fps << ", qsize " << qsize << "\n";
				m_fps = 0;
				m_lastRecvTS = currTime;
			}
			//if(deckLinkOutput->DisplayVideoFrameSync(videoFrame.Get()))
			//{
			//	fprintf(stderr, "Unable to schedule output video frame\n");
			//	qDebug() << "Unable to schedule output video frame"<<"\n";
			//	//break;
			//	continue;
			//}
			//scheduledFramesList.push_back(outputFrame);
			//checkEndOfPreroll();
		}
		/*while (outputVideoFrameQueue.Size() > 30)
		{
			outputVideoFrameQueue.Pop(outputImage);
		}*/
	}
}
// Audio worker thread (delay-based variant, the one started by StartPlayback):
// pops packets from outputAudioFrameQueue, delays each so it plays out
// 'output_deley_ms' after its capture timestamp, then writes it synchronously.
// NOTE(review): a write failure 'break's out of the loop, permanently ending
// audio output for this device — confirm that is intended.
void DeckLinkOutputDevice::scheduleAudioFramesFuncDeley(void)
{
	while (true)
	{
		std::shared_ptr<AudioPacket> audio_packet;
		if (outputAudioFrameQueue.WaitFor(audio_packet))
		{
			auto now_time = QDateTime::currentMSecsSinceEpoch();
			auto data = audio_packet->buffer;
			auto sample = audio_packet->sample;
			auto audio_tm = audio_packet->frame_time_stamp;
			// Age of this packet relative to capture, in ms.
			auto dever_time = now_time - audio_tm;
			//qDebug() << "index:" << Index << "input frame cur time:" << outputImage->getInputFrameCurTimeStamp() << " now time:" << now_time << " dever time:" << dever_time << "\n";
			// Sleep until the configured output delay has elapsed.
			if (dever_time < output_deley_ms)
			{
				current_sleep_ms = output_deley_ms - dever_time;
				std::this_thread::sleep_for(std::chrono::milliseconds(current_sleep_ms));
			}
			//qDebug() << "index:" << Index << "send sdi audio timestamp:" << audio_tm << ",video timestamp:" << current_video_time << "\n";
			quint32 sampleFramesWritten;
			HRESULT ret = deckLinkOutput->WriteAudioSamplesSync(data, sample, &sampleFramesWritten);
			if (ret == S_OK)
			{
				// Partial writes are currently ignored.
				if (sampleFramesWritten < sample)
				{
				}
			}
			else
			{
				qDebug() << "Unable to schedule output audio frame" << "\n";
				break;
			}
		}
	}
}
// Audio worker thread (video-sync variant, currently unused — StartPlayback
// starts scheduleAudioFramesFuncDeley instead): holds each packet back until
// the video thread's 'current_video_time' has caught up to within one packet
// duration of the audio timestamp, then writes it synchronously.
void DeckLinkOutputDevice::scheduleAudioFramesFunc(void)
{
	while (true)
	{
		std::shared_ptr<AudioPacket> audio_packet;
		if (outputAudioFrameQueue.WaitFor(audio_packet))
		{
			auto data = audio_packet->buffer;
			auto sample = audio_packet->sample;
			auto audio_tm = audio_packet->frame_time_stamp;
			// Packet duration in ms at 48 kHz.
			qint32 duration = sample * 1000 / 48000;
			// Poll-and-sleep until video time catches up (sleeps one packet
			// duration per iteration; current_video_time is written by the
			// video thread).
			while(true)
			{
				/*if (!current_video_time) {
					std::this_thread::sleep_for(std::chrono::milliseconds(current_sleep_ms));
				}*/
				if(audio_tm > current_video_time && audio_tm - current_video_time > duration)
				{
					std::this_thread::sleep_for(std::chrono::milliseconds(duration));
				}
				else if ( (audio_tm > current_video_time && audio_tm - current_video_time <= duration) || audio_tm <= current_video_time)
				{
					break;
				}
				else {
					// Unreachable: the two branches above cover all cases.
					qDebug() << "send audio other--------------\n";
				}
			}
			qDebug() << "index:"<<Index << "send sdi audio timestamp:" << audio_tm << ",video timestamp:"<<current_video_time << "\n";
			quint32 sampleFramesWritten;
			HRESULT ret = deckLinkOutput->WriteAudioSamplesSync(data,sample,&sampleFramesWritten);
			if(ret == S_OK)
			{
				// Partial writes are currently ignored.
				if (sampleFramesWritten < sample)
				{
				}
			}
			else
			{
				qDebug() << "Unable to schedule output audio frame" << "\n";
				break;
			}
		}
	}
}
// Wait for the genlock reference signal to lock.
// NOTE(review): the polling loop is commented out, so this now ALWAYS returns
// false — StartPlayback(requireReferenceLocked == true) will always fail.
// The locals below are kept only for the disabled code path.
bool DeckLinkOutputDevice::waitForReferenceSignalToLock()
{
	ComPtr<IDeckLinkStatus> deckLinkStatus(IID_IDeckLinkStatus, deckLink);
	dlbool_t referenceSignalLocked;
	//auto isStarting = [this] {std::lock_guard<std::mutex> locker(mutex); return state == PlaybackState::Starting; };
	//while(isStarting())
	//{
	//	if ((deckLinkStatus->GetFlag(bmdDeckLinkStatusReferenceSignalLocked, &referenceSignalLocked) == S_OK) && referenceSignalLocked)
	//		return true;
	//	std::this_thread::sleep_for(std::chrono::milliseconds(50));
	//}
	return false;
}
// Would commence scheduled playback once enough frames are prerolled; the
// actual kick-off is disabled (frames are displayed synchronously instead),
// so this is currently a no-op. Original logic retained below for reference.
void DeckLinkOutputDevice::checkEndOfPreroll()
{
	if (state != PlaybackState::Prerolling)
		return;

	// If prerolling, check whether sufficent audio and video samples have been scheduled
	//if(scheduledFramesList.size() >= videoPrerollSize)
	//{
	//	if(deckLinkOutput->StartScheduledPlayback(startPlaybackTime, frameTimescale, 1.0) != S_OK)
	//	{
	//		fprintf(stderr, "Unable to start scheduled playback\n");
	//		return;
	//	}
	//	state = PlaybackState::Running;
	//}
}
// Slot: enqueue one video frame for the output thread, dropping the whole
// backlog if the consumer has fallen too far behind (queue > audio_max_size).
// FIX: the original called image->IsValid() before its null check (the
// trailing 'if (image)'), so a null frame would have crashed; the null check
// now comes first.
void DeckLinkOutputDevice::AddFrame(std::shared_ptr<Image> image)
{
	START_SLOT_TIME_COUNTER
	if (!image || !image->IsValid())
		return;
	//if (outputVideoFrameQueue.Size() >= 4)
	//	qDebug() << "DeckLinkOutputDevice::AddFrame video---qsize:" << "\t" << outputVideoFrameQueue.Size() << "\n";
	if (outputVideoFrameQueue.Size() > audio_max_size)
	{
		outputVideoFrameQueue.Reset();
	}
	outputVideoFrameQueue.Push(image);
	END_SLOT_TIME_COUNTER
}
// Slot: enqueue one audio packet for the output thread, flushing the queue
// when the consumer has fallen too far behind (queue > audio_max_size).
void DeckLinkOutputDevice::AddAudioFrame(std::shared_ptr<AudioPacket> audio_packet)
{
	START_SLOT_TIME_COUNTER
	//if (outputAudioFrameQueue.Size() >= 4)
	//qDebug() << "index:" << Index << "DeckLinkOutputDevice::AddAudioFrame audio---qsize:" << "\t" << outputAudioFrameQueue.Size() << "\n";
	if (outputAudioFrameQueue.Size() > audio_max_size)
		outputAudioFrameQueue.Reset();
	if (audio_packet)
		outputAudioFrameQueue.Push(audio_packet);
	END_SLOT_TIME_COUNTER
}
// Derive per-device settings from the device's display name: the first number
// in the name becomes Device_id, and ids > 4 (presumably outputs; ids 1-4
// appear to be inputs — TODO confirm) get a delay looked up from the global
// map_output_delay, floored at 1000 ms.
// NOTE(review): when Device_id <= 4 (or the name query fails),
// output_deley_ms / audio_max_size are left uninitialized but are still read
// in AddFrame/AddAudioFrame — confirm those paths never run for such devices.
void DeckLinkOutputDevice::InitResource()
{
	if (deckLink)
	{
		dlstring_t displayName;
		QString deviceName;
		if (deckLink->GetDisplayName(&displayName) == S_OK)
		{
			deviceName = DlToQString(displayName);
			DeleteString(displayName);
			// Extract the first run of digits from the display name.
			QRegExp rx("\\d+");
			rx.indexIn(deviceName, 0);
			QString deviceIdStr;
			deviceIdStr = rx.cap(0);
			Device_id = deviceIdStr.toInt();
			if (Device_id > 4)
			{
				int key = Device_id - 4;
				int value = map_output_delay[key];
				// Enforce a minimum output delay of 1 second.
				if (value < 1 * 1000)
				{
					output_deley_ms = 1 * 1000;
				}
				else output_deley_ms = value;
				current_sleep_ms = output_deley_ms;
				// Queue cap: (delay seconds + 3) * 50 entries (50 fps assumed
				// — TODO confirm).
				audio_max_size = (output_deley_ms / 1000 + 3) * 50;
			}
		}
	}
}
\ No newline at end of file
#pragma once
#pragma once
#include <atomic>
#include <functional>
#include <condition_variable>
#include <list>
#include <memory>
#include <QMutex>
#include <QThread>
#include <QString>
#include <QObject>
#include "DeckLinkAPI.h"
#include "Utils/CustomEvents.h"
#include "Utils/Common.h"
#include "Utils/SampleQueue.h"
#include "Utils/ComPtr.h"
#include "Utils/Platform.h"
#include "DeckLinkOutputVideoFrame.h"
#include "Utils/Image.h"
#include "Utils/AudioPacket.h"
// Drives one DeckLink playback device: receives video frames / audio packets
// via Qt slots, queues them, and plays them out from two worker threads after
// a configurable per-device delay. Also implements the SDK's video-output
// callback interface (reference-counted via RefCount).
class DeckLinkOutputDevice : public QObject, public IDeckLinkVideoOutputCallback
{
	Q_OBJECT
	enum class PlaybackState { Idle, Starting, Prerolling, Running, Stopping, Stopped };
	using ScheduledFrameCompletedCallback = std::function<void(ComPtr<DeckLinkOutputVideoFrame>)>;
	using ScheduledFramesList = std::list<ComPtr<DeckLinkOutputVideoFrame>>;

public:
	DeckLinkOutputDevice(ComPtr<IDeckLink>& decklink, int videoPrerollSize,int index);
	virtual ~DeckLinkOutputDevice() = default;

	// IUnknown interface
	HRESULT	STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID* ppv) override;
	ULONG	STDMETHODCALLTYPE AddRef() override;
	ULONG	STDMETHODCALLTYPE Release() override;

	// IDeckLinkVideoOutputCallback interface
	HRESULT	STDMETHODCALLTYPE ScheduledFrameCompleted(IDeckLinkVideoFrame* completedFrame, BMDOutputFrameCompletionResult result) override;
	HRESULT	STDMETHODCALLTYPE ScheduledPlaybackHasStopped() override;

	// Other methods
	// Configure outputs and start the worker threads; false on SDK failure.
	bool StartPlayback(BMDDisplayMode displayMode, bool enable3D, BMDPixelFormat pixelFormat, bool requireReferenceLocked, IDeckLinkScreenPreviewCallback* screenPreviewCallback);
	// Join workers and disable outputs; safe when not running.
	void StopPlayback(void);
	void CancelWaitForReference();
	BMDTimeScale getFrameTimescale(void) const { return frameTimescale; }
	bool getReferenceSignalMode(BMDDisplayMode* mode);
	bool isPlaybackActive(void);
	void onScheduledFrameCompleted(const ScheduledFrameCompletedCallback& callback) { scheduledFrameCompletedCallback = callback; }
	ComPtr<IDeckLink> GetDeckLinkInstance(void) const { return deckLink; }
	ComPtr<IDeckLinkOutput> getDeckLinkOutput(void) const { return deckLinkOutput; }

public slots:
	// Producer-side entry points; both drop/flush when queues overflow.
	void AddAudioFrame(std::shared_ptr<AudioPacket> audio_packet);
	void AddFrame(std::shared_ptr<Image> image);

private:
	// Parse device name into Device_id and per-device delay settings.
	void InitResource();

private:
	std::atomic<ULONG>	RefCount;       // COM-style reference count
	PlaybackState		state;
	//
	ComPtr<IDeckLink>	deckLink;
	ComPtr<IDeckLinkOutput>	deckLinkOutput;
	BMDVideoConnection	SelectedOutputConnection;
	//
	// Producer/consumer queues feeding the two worker threads.
	SampleQueue<std::shared_ptr<Image>>		outputVideoFrameQueue;
	SampleQueue<std::shared_ptr<AudioPacket>>	outputAudioFrameQueue;
	//ScheduledFramesList scheduledFramesList;
	//
	uint32_t		videoPrerollSize;
	//
	BMDTimeValue		frameDuration;
	BMDTimeScale		frameTimescale;
	//
	bool			seenFirstVideoFrame;
	BMDTimeValue		startPlaybackTime;
	bool			first_sleep;
	//
	//std::mutex mutex;
	//std::condition_variable playbackStoppedCondition;
	//
	std::thread		scheduleVideoFramesThread;
	std::thread		scheduleAudioFramesThread;
	//
	ScheduledFrameCompletedCallback scheduledFrameCompletedCallback;

	// Private methods
	void scheduleVideoFramesFunc(void);
	void scheduleAudioFramesFunc(void);
	void scheduleAudioFramesFuncDeley(void);
	void outputAudioFrameFunc(void);
	bool waitForReferenceSignalToLock(void);
	void checkEndOfPreroll(void);
	//
	int64_t	current_video_time = 0;   // last displayed frame's capture timestamp (ms)
	int64_t	current_sleep_ms = 0;     // last computed delay sleep (ms)
	qint32	Index;                    // page index
	qint32	Device_id;                // parsed from the device display name
	qint32	output_deley_ms;          // per-device output delay (ms); set in InitResource
	qint32	audio_max_size;           // queue cap before flushing; set in InitResource
	int	m_fps;                    // frames displayed since last fps log
	uint64_t m_lastRecvTS;            // timestamp of last fps log (ms)
};
#include <QStandardItemModel>
#include <QStandardItemModel>
#include <QStandardItem>
#include <QToolBox>
#include <QMessageBox>
#include <QLineEdit>
#include "MomentaMedia.h"
#include "BlackMagicDesign/DeckLinkOutputPage.h"
#include "Utils/Platform.h"
// File-local UI constants and helpers for DeckLinkOutputPage.
namespace
{
	const int kComboMinimumWidth = 185;

	// Display-name lookup tables for output connectors and pixel formats.
	const std::vector<std::pair<BMDVideoConnection, QString>> kVideoOutputConnections = {
		std::make_pair(bmdVideoConnectionSDI, QString("SDI")),
		std::make_pair(bmdVideoConnectionHDMI, QString("HDMI")),
		std::make_pair(bmdVideoConnectionOpticalSDI, QString("Optical SDI")),
		std::make_pair(bmdVideoConnectionComponent, QString("Component")),
		std::make_pair(bmdVideoConnectionComposite, QString("Composite")),
		std::make_pair(bmdVideoConnectionSVideo, QString("S-Video")),
	};

	const std::vector<std::pair<BMDPixelFormat, QString>> kVideoOutputPixelFormat = {
		std::make_pair(bmdFormat8BitYUV, QString("8BitYUV")),
		std::make_pair(bmdFormat8BitARGB, QString("8BitARGB")),
		std::make_pair(bmdFormat8BitBGRA, QString("8BitBGRA")),
		std::make_pair(bmdFormat10BitYUV, QString("10BitYUV")),
		std::make_pair(bmdFormat10BitRGB, QString("10BitRGB")),
		std::make_pair(bmdFormat10BitRGBX, QString("10BitRGBX")),
		std::make_pair(bmdFormat10BitRGBXLE, QString("10BitRGBXLE")),
		std::make_pair(bmdFormat12BitRGB, QString("12BitRGB")),
		std::make_pair(bmdFormat12BitRGBLE, QString("12BitRGBLE")),
	};

	// Walk up the QWidget parent chain and return the nearest ancestor of
	// type T, or nullptr if none exists.
	template<class T>
	T* findParent(QWidget* widget)
	{
		T* result = nullptr;
		do
		{
			widget = widget->parentWidget();
			result = qobject_cast<T*>(widget);
		} while (widget && !result);
		return result;
	}
}
// Build the output page's form UI (device / format / pixel-format combos, a
// preview widget and a UDP port field) and wire up the combo-box signals.
// SelectedDevice and Process both start null until a device is granted.
// NOTE(review): PreviewView is created but never added to FormLayout here —
// confirm it is parented/placed elsewhere.
DeckLinkOutputPage::DeckLinkOutputPage() : SelectedDevice(nullptr), Process(nullptr)
{
	FormLayout = new QFormLayout(this);

	DeviceListCombo = new QComboBox();
	DeviceListCombo->setMinimumWidth(kComboMinimumWidth);
	FormLayout->addRow("Output Device:", DeviceListCombo);
	DeviceListCombo->addItem("None");

	VideoFormatCombo = new QComboBox();
	VideoFormatCombo->setMinimumWidth(kComboMinimumWidth);
	VideoFormatCombo->setEnabled(false);
	FormLayout->addRow("Video Format", VideoFormatCombo);

	VideoPixelFormatCombo = new QComboBox();
	VideoPixelFormatCombo->setMinimumWidth(kComboMinimumWidth);
	VideoPixelFormatCombo->setEnabled(false);
	FormLayout->addRow("Video Pixel Format:", VideoPixelFormatCombo);

	PreviewView = new DeckLinkOpenGLWidget(dynamic_cast<QWidget*>(this));
	PreviewView->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::Expanding);

	// Local UDP port used by the processing thread (see RequestedDeviceGranted).
	PortNumLineEdit = new QLineEdit();
	PortNumLineEdit->setMinimumWidth(kComboMinimumWidth);
	FormLayout->addRow("PortNum", PortNumLineEdit);

	connect(DeviceListCombo, QOverload<int>::of(&QComboBox::currentIndexChanged), this, &DeckLinkOutputPage::OutputDeviceChanged);
	connect(VideoFormatCombo, QOverload<int>::of(&QComboBox::currentIndexChanged), this, &DeckLinkOutputPage::VideoFormatChanged);
	connect(VideoPixelFormatCombo, QOverload<int>::of(&QComboBox::currentIndexChanged), this, &DeckLinkOutputPage::VideoPixelFormatChanged);
	connect(this, &QObject::objectNameChanged, this, &DeckLinkOutputPage::ObjectNameChanged);
}
// Tear down the page: stop playback, stop the processing thread, free the
// layout.
// FIX: SelectedDevice and Process are initialized to nullptr in the
// constructor and only set once a device is granted; the original
// dereferenced both unconditionally, crashing when the page was destroyed
// with no device ever selected.
DeckLinkOutputPage::~DeckLinkOutputPage()
{
	if (SelectedDevice)
		SelectedDevice->StopPlayback();
	if (Process)
		Process->exit();
	delete FormLayout;
}
// Resize the preview widget and clear any stale frame.
void DeckLinkOutputPage::SetPreviewSize(QSize previewSize)
{
	PreviewView->resize(previewSize);
	PreviewView->Clear();
}
// Handle custom application events posted to this page. Neither event is
// handled by the output page; both are ignored so they can propagate.
// FIX: the first case was missing its 'break' and fell through into the
// second — benign today (both branches only call ignore()), but a latent bug
// if either branch ever gains more logic.
void DeckLinkOutputPage::customEvent(QEvent* event)
{
	switch (event->type())
	{
	case kVideoFormatChangedEvent:
		event->ignore();
		break;
	case kErrorRestartingCaptureEvent:
		event->ignore();
		break;
	default:
		break;
	}
}
// Currently a no-op: playback is started directly in RequestedDeviceGranted /
// FormatChanged instead. The original fixed-mode start is kept for reference.
void DeckLinkOutputPage::StartOutput()
{
	/*
	if (!SelectedDevice)
		return;

	BMDDisplayMode displayMode = bmdModeHD1080p25;
	//displayMode = (BMDDisplayMode)VideoFormatCombo->currentData().value<unsigned int>();
	qDebug() << "==> " << displayMode << "\n";
	BMDPixelFormat pixelFormat = bmdFormat10BitYUV;
	//pixelFormat = (BMDPixelFormat)VideoPixelFormatCombo->currentData().value<unsigned int>();
	SelectedDevice->StartPlayback(displayMode, false, pixelFormat, false, PreviewView->GetDelegate());
	*/
}
// Add a newly discovered device to the combo box (deduplicated by pointer)
// and, if no device is currently selected, try to claim it.
void DeckLinkOutputPage::AddDevice(ComPtr<IDeckLink>& deckLink, bool deviceIsActive)
{
	// Resolve the device's human-readable name; bail out if unavailable.
	dlstring_t nameStr;
	if (deckLink->GetDisplayName(&nameStr) != S_OK)
		return;
	QString deviceName = DlToQString(nameStr);
	DeleteString(nameStr);

	// Only insert the device if it is not already listed.
	const QVariant deviceData = QVariant::fromValue((void*)deckLink.Get());
	if (DeviceListCombo->findData(deviceData) == -1)
	{
		DeviceListCombo->addItem(deviceName, deviceData);
		EnableDevice(deckLink, deviceIsActive);
	}

	if (!SelectedDevice)
	{
		// Request deckLink object from parent widget if it`s not already in use by anther page
		emit RequestDeckLinkIfAvailable(deckLink);
	}
}
// Remove a departed device from the combo box; if it was the current
// selection, fall back to the "None" entry (index 0).
void DeckLinkOutputPage::RemoveDevice(ComPtr<IDeckLink>& deckLink)
{
	// Locate the combo entry for this device; index 0 is "None" and entries
	// at or below 0 are never removed.
	const int index = DeviceListCombo->findData(QVariant::fromValue((void*)deckLink.Get()));
	if (index <= 0)
		return;

	const bool wasCurrent = (index == DeviceListCombo->currentIndex());

	// Remove silently so connector/video-mode menus are not refreshed for a
	// device that no longer exists.
	const bool blocked = DeviceListCombo->blockSignals(true);
	DeviceListCombo->removeItem(index);
	DeviceListCombo->blockSignals(blocked);

	if (wasCurrent)
		DeviceListCombo->setCurrentIndex(0);
}
// Enable or grey out the combo-box entry for a device, then refresh the
// page's selection-dependent UI.
void DeckLinkOutputPage::EnableDevice(ComPtr<IDeckLink>& deckLink, bool enable)
{
	if (deckLink == nullptr)
		return;

	const int index = DeviceListCombo->findData(QVariant::fromValue((void*)deckLink.Get()));
	if (index >= 0)
	{
		QStandardItemModel* model = qobject_cast<QStandardItemModel*>(DeviceListCombo->model());
		QStandardItem* item = model->item(index);
		const Qt::ItemFlags flags = item->flags();
		item->setFlags(enable ? (flags | Qt::ItemIsEnabled) : (flags & ~Qt::ItemIsEnabled));
	}

	SelectedDeviceChanged();
}
// If 'deckLink' is the device this page currently owns, stop playback,
// release it and reset the combo to "None". Returns true when released.
bool DeckLinkOutputPage::ReleaseDeviceIfSelected(ComPtr<IDeckLink>& deckLink)
{
	const bool isSelected = (SelectedDevice.Get() != nullptr)
		&& (SelectedDevice->GetDeckLinkInstance().Get() == deckLink.Get());
	if (!isSelected)
		return false;

	SelectedDevice->StopPlayback();
	SelectedDevice = nullptr;
	DeviceListCombo->setCurrentIndex(0);
	return true;
}
void DeckLinkOutputPage::OutputDeviceChanged(int selectedDeviceIndex)
{
START_SLOT_TIME_COUNTER
if (selectedDeviceIndex == -1)
return;
if(SelectedDevice)
{
ComPtr<IDeckLink> existingDevice = SelectedDevice->GetDeckLinkInstance();
// Stop and release existing selected device
SelectedDevice->StopPlayback();
SelectedDevice = nullptr;
// Notify parent widget that device is available
emit RelinquishDeckLink(existingDevice);
}
QVariant selectedDeviceVariant = DeviceListCombo->itemData(selectedDeviceIndex);
ComPtr<IDeckLink> deckLink((IDeckLink*)selectedDeviceVariant.value<void*>());
if(deckLink)
{
// Request deckLink object from parent widget
emit RequestDeckLink(deckLink);
}
else
{
// Update UI since "None" was selected
SelectedDeviceChanged();
}
END_SLOT_TIME_COUNTER
}
// Slot: the parent widget granted us the requested device. Creates the output
// wrapper, wires the capture -> process -> output pipeline, restarts the
// processing thread and starts playback at a fixed 1080p50 / 8-bit BGRA.
// NOTE(review): 'Process' is initialized to nullptr in the constructor and no
// assignment is visible in this file, yet it is dereferenced here via .get()/
// ->isRunning() — confirm it is created elsewhere before a device is granted.
void DeckLinkOutputPage::RequestedDeviceGranted(ComPtr<IDeckLink>& device)
{
	START_SLOT_TIME_COUNTER
	SelectedDevice = MakeComPtr<DeckLinkOutputDevice>(device, 1,Index);
	// Register profile callback with newly selected device`s profile manager
	// TODO: Connect
	// Pipeline: capture frames -> ProcessThread -> output device; input audio
	// goes straight to the output device; input format changes restart output.
	connect(BindingInputPage->GetCapture(), &CaptureThread::PushFrame, Process.get(), &ProcessThread::AddFrame);
	connect(Process.get(), &ProcessThread::PushFrame, SelectedDevice.Get(), &DeckLinkOutputDevice::AddFrame);
	connect(BindingInputPage->GetSelectedDevice().Get(), &DeckLinkInputDevice::PushAudioFrame, SelectedDevice.Get(), &DeckLinkOutputDevice::AddAudioFrame);
	connect(BindingInputPage, &DeckLinkInputPage::FormatChanged, this, &DeckLinkOutputPage::FormatChanged);
	if(Process->isRunning())
		Process->exit();
	// Processing thread streams to localhost on the page's configured port.
	Process->SetUpUDP("127.0.0.1", PortNumLineEdit->text());
	Process->start();
	SelectedDeviceChanged();
	StartOutput();
	SelectedDevice->StartPlayback(bmdModeHD1080p50, false, bmdFormat8BitBGRA, false, PreviewView->GetDelegate());
	END_SLOT_TIME_COUNTER
}
// Slot: the bound input page detected a new video format; restart playback in
// that mode, keeping the 8-bit BGRA pixel format used elsewhere on this page.
// FIX: guard against a format-change signal arriving when no device is
// selected — the original dereferenced SelectedDevice unconditionally.
void DeckLinkOutputPage::FormatChanged(BMDDisplayMode displayMode)
{
	if (!SelectedDevice)
		return;
	BMDDisplayMode displayMode_ = displayMode;
	//BMDPixelFormat pixelFormat = bmdFormat10BitYUV;
	BMDPixelFormat pixelFormat = bmdFormat8BitBGRA;
	SelectedDevice->StartPlayback(displayMode_, false, pixelFormat, false, PreviewView->GetDelegate());
}
void DeckLinkOutputPage::ObjectNameChanged(const QString& newName)
{
START_SLOT_TIME_COUNTER
PortNumLineEdit->setText(QString::number(5000 + QString(newName.at(newName.size() - 1)).toInt()));
END_SLOT_TIME_COUNTER
}
// Slot stub: the output format is currently fixed (see RequestedDeviceGranted),
// so combo changes are ignored; only the timing counters run.
void DeckLinkOutputPage::VideoFormatChanged(int selectedVideoFormatIndex)
{
	START_SLOT_TIME_COUNTER
	END_SLOT_TIME_COUNTER
}
// Slot stub: the pixel format is currently fixed (8-bit BGRA), so combo
// changes are ignored; only the timing counters run.
void DeckLinkOutputPage::VideoPixelFormatChanged(int selectedVideoPixelFormatIndex)
{
	START_SLOT_TIME_COUNTER
	END_SLOT_TIME_COUNTER
}
// Stub: restart logic not implemented; only the timing counters run.
void DeckLinkOutputPage::RestartOutput()
{
	START_SLOT_TIME_COUNTER
	END_SLOT_TIME_COUNTER
}
// Sync the device combo, preview overlay and hosting toolbox title with the
// currently selected device (index 0 / "None" when nothing is selected).
void DeckLinkOutputPage::SelectedDeviceChanged()
{
	int indexToSelect = 0;
	bool active = true;

	if (SelectedDevice)
	{
		indexToSelect = DeviceListCombo->findData(QVariant::fromValue((void*)SelectedDevice->GetDeckLinkInstance().Get()));
		active = SelectedDevice->isPlaybackActive();
	}
	else
	{
		PreviewView->Clear();
	}

	// Select the item in the combo box,but we don`t want to trigger any further processing
	bool blocked = DeviceListCombo->blockSignals(true);
	DeviceListCombo->setCurrentIndex(indexToSelect);
	DeviceListCombo->blockSignals(blocked);

	// Update the toolbox title and the overlay.
	// FIX: findParent returns nullptr when this page is not (yet) hosted
	// inside a QToolBox; the original dereferenced the result unconditionally.
	QToolBox* toolBox = findParent<QToolBox>(this);
	if (toolBox == nullptr)
		return;
	int pageIndex = toolBox->indexOf(this);
	QString title = QString("Output %1: %2%3").arg(pageIndex + 1).arg(DeviceListCombo->itemText(indexToSelect)).arg(active ? "" : " [inactive]");
	toolBox->setItemText(pageIndex, title);
	PreviewView->GetOverlay()->SetDeviceLabel(title);
}
// Stub: the output page does not populate a display-mode menu (the output
// format is fixed; see RequestedDeviceGranted).
void DeckLinkOutputPage::RefreshDisplayModeMenu()
{
}
#include "BlackMagicDesign/DeckLinkOutputVideoFrame.h"
#include "BlackMagicDesign/DeckLinkOutputVideoFrame.h"
#include <chrono>
#include <QDebug>
#include "Utils/Common.h"
#include <immintrin.h>
#include <cstdint>
#if 0
DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame() : RefCount(1), width(-1), height(-1), rowBytes(-1), frameFlags(bmdFrameFlagDefault), pixelFormat(bmdFormat8BitBGRA)
{
}
DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame(int w, int h, BMDFrameFlags flags, BMDPixelFormat pixelFormat) : RefCount(1), width(w), height(h), frameFlags(bmdFrameFlagDefault), pixelFormat(pixelFormat)
{
rowBytes = GetRowBytesFromPixelFormat(width, pixelFormat);
buffer.resize(height * rowBytes);
}
DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame(Image& image)
: RefCount(1),
width(image.GetWidth()),
height(image.GetHegiht()),
frameFlags(image.GetFlags()),
pixelFormat(image.GetPixelFormat())
{
rowBytes = GetRowBytesFromPixelFormat(width, pixelFormat);
buffer.resize(height * rowBytes);
memcpy(buffer.data(), image.GetBytes(), height * rowBytes);
}
#endif
// Zero-copy wrapper: presents an Image as an IDeckLinkVideoFrame. The
// shared_ptr ('m_img') keeps the pixel data alive for the frame's lifetime;
// GetBytes() hands out the Image's buffer directly (the copying code below is
// deliberately disabled).
DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame(std::shared_ptr<Image> image)
	: RefCount(1),
	width(image->GetWidth()),
	height(image->GetHegiht()),
	frameFlags(image->GetFlags()),
	pixelFormat(image->GetPixelFormat()),
	m_img(image)
{
	rowBytes = GetRowBytesFromPixelFormat(width, pixelFormat);
	//buffer.resize(height * rowBytes);
	//memcpy(buffer.data(), image->GetBytes(), height * rowBytes);
}
#if 0
// NOTE(review): legacy rvalue constructor (still deep-copies despite taking
// Image&&); compiled out in favour of the shared_ptr<Image> constructor.
DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame(Image&& image)
	: RefCount(1),
	width(image.GetWidth()),
	height(image.GetHegiht()),
	frameFlags(image.GetFlags()),
	pixelFormat(image.GetPixelFormat())
{
	rowBytes = GetRowBytesFromPixelFormat(width, pixelFormat);
	buffer.resize(height * rowBytes);
	memcpy(buffer.data(), image.GetBytes(), height * rowBytes);
}
#endif
// Pixel data is owned by the shared Image (m_img); nothing to release here.
DeckLinkOutputVideoFrame::~DeckLinkOutputVideoFrame()
{
	//buffer.clear();
}
// IUnknown::AddRef — atomically bump the reference count and report the
// value after the increment.
ULONG DeckLinkOutputVideoFrame::AddRef()
{
	const ULONG updated = RefCount.fetch_add(1) + 1;
	return updated;
}
// IUnknown::Release — atomically drop one reference; the object deletes
// itself when the count reaches zero.
ULONG DeckLinkOutputVideoFrame::Release()
{
	const ULONG remaining = RefCount.fetch_sub(1) - 1;
	if (remaining == 0)
	{
		delete this;
	}
	return remaining;
}
// IUnknown::QueryInterface — this frame only exposes IUnknown and
// IDeckLinkVideoFrame; every other IID is rejected with E_NOINTERFACE.
HRESULT DeckLinkOutputVideoFrame::QueryInterface(REFIID riid, LPVOID* ppv)
{
	if (ppv == nullptr)
		return E_INVALIDARG;

	// Per COM convention the out-parameter is nulled before any lookup.
	*ppv = nullptr;

	if (riid == IID_IUnknown)
	{
		*ppv = this;
	}
	else if (riid == IID_IDeckLinkVideoFrame)
	{
		*ppv = static_cast<IDeckLinkVideoFrame*>(this);
	}
	else
	{
		return E_NOINTERFACE;
	}

	// A successful lookup hands out a new reference.
	AddRef();
	return S_OK;
}
// --- Trivial IDeckLinkVideoFrame accessors (values cached at construction) ---
long DeckLinkOutputVideoFrame::GetWidth()
{
	return (long)width;
}
long DeckLinkOutputVideoFrame::GetHeight()
{
	return (long)height;
}
BMDFrameFlags DeckLinkOutputVideoFrame::GetFlags()
{
	return frameFlags;
}
BMDPixelFormat DeckLinkOutputVideoFrame::GetPixelFormat()
{
	return pixelFormat;
}
long DeckLinkOutputVideoFrame::GetRowBytes()
{
	return rowBytes;
}
// IDeckLinkVideoFrame::GetBytes — expose the wrapped Image's pixel buffer
// without copying. Previously a null out-parameter or a frame constructed
// without a backing image caused a null-pointer dereference.
HRESULT DeckLinkOutputVideoFrame::GetBytes(void** buf)
{
	if (buf == nullptr)
		return E_INVALIDARG;	// COM contract: reject null out-parameter
	if (!m_img)
	{
		*buf = nullptr;
		return E_FAIL;			// no backing image attached
	}
	*buf = m_img->GetBytes();
	return S_OK;
}
#pragma once
#pragma once
#include <atomic>
#include <vector>
#include "Utils/ComPtr.h"
#include "DeckLinkAPI.h"
#include "Utils/Image.h"
// IDeckLinkVideoFrame implementation that wraps a shared Image (zero-copy):
// pixel data stays owned by the shared_ptr<Image>; this object only caches
// geometry/format and carries per-frame timing metadata.
class DeckLinkOutputVideoFrame : public IDeckLinkVideoFrame
{
public:
	// Wrap an Image; 'image' must be non-null (dereferenced in the ctor).
	DeckLinkOutputVideoFrame(std::shared_ptr<Image> image);
	virtual ~DeckLinkOutputVideoFrame();
	// IDeckLinkVideoFrame interface
	virtual long STDMETHODCALLTYPE GetWidth();
	virtual long STDMETHODCALLTYPE GetHeight();
	virtual long STDMETHODCALLTYPE GetRowBytes();
	virtual HRESULT STDMETHODCALLTYPE GetBytes(void** buf);
	virtual BMDFrameFlags STDMETHODCALLTYPE GetFlags();
	virtual BMDPixelFormat STDMETHODCALLTYPE GetPixelFormat();
	// Dummy implementations of remaining methods in IDeckLinkVideoFrame
	virtual HRESULT STDMETHODCALLTYPE GetAncillaryData(IDeckLinkVideoFrameAncillary** ancillary) { return E_NOTIMPL; }
	virtual HRESULT STDMETHODCALLTYPE GetTimecode(BMDTimecodeFormat format, IDeckLinkTimecode** timecode) { return E_NOTIMPL; }
	// IUnknown interface
	virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void** ppv);
	virtual ULONG STDMETHODCALLTYPE AddRef();
	virtual ULONG STDMETHODCALLTYPE Release();
	// Timing metadata accessors (values are opaque BMD reference times).
	void setVideoStreamTime(const BMDTimeValue time) { videoStreamTime = time; }
	void setVideoFrameDuration(const BMDTimeValue time) { videoFrameDuration = time; }
	void setInputFrameStartReferenceTime(const BMDTimeValue time) { inputFrameStartReferenceTime = time; }
	void setInputFrameArrivedReferenceTime(const BMDTimeValue time) { inputFrameArrivedReferenceTime = time; }
	void setOutputFrameScheduledReferenceTime(const BMDTimeValue time) { outputFrameScheduledReferenceTime = time; }
	void setOutputFrameCompletedReferenceTime(const BMDTimeValue time) { outputFrameCompletedReferenceTime = time; }
	void setOutputCompletionResult(const BMDOutputFrameCompletionResult result) { outputFrameCompletionResult = result; }
	BMDTimeValue getVideoStreamTime() { return videoStreamTime; }
	BMDTimeValue getVideoFrameDuration() { return videoFrameDuration; }
	BMDTimeValue getInputFrameStartReferenceTime() { return inputFrameStartReferenceTime; }
	BMDTimeValue getInputFrameArrivedReferenceTime() { return inputFrameArrivedReferenceTime; }
	BMDTimeValue getOutputFrameScheduledReferenceTime() { return outputFrameScheduledReferenceTime; }
	BMDTimeValue getOutputFrameCompletedReferenceTime() { return outputFrameCompletedReferenceTime; }
	BMDOutputFrameCompletionResult getOutputCompletionResult() { return outputFrameCompletionResult; }
private:
	std::atomic<ULONG> RefCount{ 1 };	// COM reference count
	// Frame geometry/format, cached from the wrapped Image at construction.
	int width{ -1 };
	int height{ -1 };
	int rowBytes{ -1 };
	BMDPixelFormat pixelFormat{ bmdFormat8BitBGRA };
	BMDFrameFlags frameFlags{ bmdFrameFlagDefault };
	std::vector<uint8_t> buffer;	// legacy deep-copy buffer; unused in the zero-copy path
	// Timing metadata. Default-initialized to zero so the getters never
	// return indeterminate values (previously these were uninitialized).
	BMDTimeValue videoStreamTime{ 0 };
	BMDTimeValue videoFrameDuration{ 0 };
	BMDTimeValue inputFrameStartReferenceTime{ 0 };
	BMDTimeValue inputFrameArrivedReferenceTime{ 0 };
	BMDTimeValue outputFrameScheduledReferenceTime{ 0 };
	BMDTimeValue outputFrameCompletedReferenceTime{ 0 };
	BMDOutputFrameCompletionResult outputFrameCompletionResult{};
	std::shared_ptr<Image> m_img;	// keeps the pixel data alive
};
#include "Utils/Image.h"
#include "Utils/Image.h"
#include "Utils/ComPtr.h"
#include "opencv2/core.hpp"
#include "Utils/Common.h"
/*
Image::Image(IDeckLinkVideoInputFrame* videoFrame)
{
#if USE_4K
if (videoFrame->GetWidth() != 3840 || videoFrame->GetHeight() != 2160)
{
return;
}
#else
if(videoFrame->GetWidth() != 1920 || videoFrame->GetHeight() != 1080)
{
return;
}
#endif
inVideoFrame = MakeComPtr<DeckLinkInputVideoFrame>(videoFrame);
ConvertDeckLinkVideoFrame2Mat(inVideoFrame, mat);
}
Image::Image(ComPtr<DeckLinkInputVideoFrame> videoFrame)
{
#if USE_4K
if (videoFrame->GetWidth() != 3840 || videoFrame->GetHeight() != 2160)
{
return;
}
#else
if (videoFrame->GetWidth() != 1920 || videoFrame->GetHeight() != 1080)
{
return;
}
#endif
inVideoFrame = videoFrame;
ConvertDeckLinkVideoFrame2Mat(videoFrame, mat);
}
Image::Image(ComPtr<IDeckLinkVideoInputFrame> videoFrame)
{
#if USE_4K
if (videoFrame->GetWidth() != 3840 || videoFrame->GetHeight() != 2160)
{
return;
}
#else
if (videoFrame->GetWidth() != 1920 || videoFrame->GetHeight() != 1080)
{
return;
}
#endif
inVideoFrame = MakeComPtr<DeckLinkInputVideoFrame>(videoFrame.Get());
ConvertDeckLinkVideoFrame2Mat(inVideoFrame, mat);
}*/
// Build an Image around an existing AVBuff pixel buffer (no copy; the
// shared_ptr shares ownership).
// curtimestamp: capture time in ms; sequence: capture frame counter used by
// ProcessThread to match ROI rules against frames.
Image::Image(std::shared_ptr<AVBuff> frame, int w, int h, BMDPixelFormat pixFmt, const qint64& curtimestamp, const qint64& sequence):
	m_frame(frame),
	m_width(w),
	m_height(h),
	m_pixFmt(pixFmt),
	inputFrameCurTimeStamp(curtimestamp),
	sequenceNum(sequence)
{
}
/*
Image::Image(const Image& other)
{
}
Image::Image(Image&& other)
{
mat = other.mat;
}
Image& Image::operator=(const Image& other)
{
return *this;
}
Image& Image::operator=(Image&& other)
{
mat = other.mat;
return *this;
}*/
// m_frame (shared_ptr<AVBuff>) releases the pixel buffer automatically.
Image::~Image()
{
}
// An Image is valid exactly when it owns a pixel buffer.
bool Image::IsValid() const
{
	return m_frame != nullptr;
}
// Raw pixel pointer owned by m_frame, or nullptr for a default-constructed /
// invalid Image. Previously this dereferenced a null m_frame; callers should
// still check IsValid() before use.
uint8_t* Image::GetBytes() const
{
	return m_frame ? m_frame->m_data : nullptr;
}
#pragma once
#pragma once
#include <QtCore/QtCore>
#include <memory>
#include "opencv2/core.hpp"
#include "opencv2/imgproc.hpp"
#include "DeckLinkAPI.h"
#include "BlackMagicDesign/DeckLinkInputVideoFrame.h"
#include "Utils//AVBuffer.h"
// Shared wrapper around a captured video frame: pixel buffer (AVBuff) plus
// geometry, pixel format and timing metadata. Copies are shallow — they
// share the underlying AVBuff.
class Image : public QObject
{
	Q_OBJECT
public:
	// Default-constructed Images are invalid (no buffer); all scalar members
	// are zero-initialized below so the getters never return garbage
	// (previously they were uninitialized).
	Image() {}
	// Wrap an existing AVBuff without copying; see Image.cpp.
	Image(std::shared_ptr<AVBuff> frame, int w, int h, BMDPixelFormat pixFmt, const qint64& curtimestamp, const qint64& sequence);
	// Shallow copy: shares the pixel buffer. Timing metadata (videoStreamTime
	// etc.) is intentionally not copied and stays at its defaults.
	Image(const Image& other)
	{
		m_frame = other.m_frame;
		m_width = other.m_width;
		m_height = other.m_height;
		m_pixFmt = other.m_pixFmt;
		inputFrameCurTimeStamp = other.inputFrameCurTimeStamp;
		sequenceNum = other.sequenceNum;
	};
	~Image();
	// Raw pixel pointer owned by m_frame (nullptr when invalid).
	uint8_t* GetBytes() const;
	// True when a pixel buffer is attached.
	bool IsValid() const;
	int GetWidth() { return m_width; }
	// Correctly spelled accessor; the historical misspelling below is kept
	// for existing callers.
	int GetHeight() { return m_height; }
	int GetHegiht() { return m_height; }
	BMDPixelFormat GetPixelFormat() { return m_pixFmt; }
	BMDFrameFlags GetFlags() { return bmdFrameFlagDefault; }
	// Timing metadata accessors (opaque BMD reference times).
	void setVideoStreamTime(const BMDTimeValue time) { videoStreamTime = time; }
	void setVideoFrameDuration(const BMDTimeValue time) { videoFrameDuration = time; }
	void setInputFrameStartReferenceTime(const BMDTimeValue time) { inputFrameStartReferenceTime = time; }
	void setInputFrameArrivedReferenceTime(const BMDTimeValue time) { inputFrameArrivedReferenceTime = time; }
	void setOutputFrameScheduledReferenceTime(const BMDTimeValue time) { outputFrameScheduledReferenceTime = time; }
	void setOutputFrameCompletedReferenceTime(const BMDTimeValue time) { outputFrameCompletedReferenceTime = time; }
	void setOutputCompletionResult(const BMDOutputFrameCompletionResult result) { outputFrameCompletionResult = result; }
	BMDTimeValue getVideoStreamTime() { return videoStreamTime; }
	BMDTimeValue getVideoFrameDuration() { return videoFrameDuration; }
	BMDTimeValue getInputFrameStartReferenceTime() { return inputFrameStartReferenceTime; }
	BMDTimeValue getInputFrameArrivedReferenceTime() { return inputFrameArrivedReferenceTime; }
	BMDTimeValue getOutputFrameScheduledReferenceTime() { return outputFrameScheduledReferenceTime; }
	BMDTimeValue getOutputFrameCompletedReferenceTime() { return outputFrameCompletedReferenceTime; }
	BMDOutputFrameCompletionResult getOutputCompletionResult() { return outputFrameCompletionResult; }
	int64_t getInputFrameCurTimeStamp() { return inputFrameCurTimeStamp; }
	int64_t getInputFrameSequence() { return sequenceNum; }
private:
	// Timing metadata — zero until explicitly set.
	BMDTimeValue videoStreamTime{ 0 };
	BMDTimeValue videoFrameDuration{ 0 };
	BMDTimeValue inputFrameStartReferenceTime{ 0 };
	BMDTimeValue inputFrameArrivedReferenceTime{ 0 };
	BMDTimeValue outputFrameScheduledReferenceTime{ 0 };
	BMDTimeValue outputFrameCompletedReferenceTime{ 0 };
	qint64 inputFrameCurTimeStamp{ 0 };	// capture time (ms)
	qint64 sequenceNum{ 0 };			// capture frame counter
	BMDOutputFrameCompletionResult outputFrameCompletionResult{};
	int m_width{ 0 };
	int m_height{ 0 };
	BMDPixelFormat m_pixFmt{};
	std::shared_ptr<AVBuff> m_frame;	// shared pixel buffer; null => invalid
};
HRESULT ConvertDeckLinkVideoFrame2Mat(ComPtr<DeckLinkInputVideoFrame> videoFrame, cv::Mat& imageFrame);
\ No newline at end of file
#include <csignal>
#include <csignal>
#include <cstddef>
#include <cstring>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <atomic>
#include "NDI/NDIOutputThread.h"
#include "Utils/Common.h"
#include "libyuv.h"
extern qint64 StartTimeStamp;
// Construct a named NDI sender of the given nominal size. The NDI instance
// itself is created lazily in Init() (called from run()).
NDIOutputThread::NDIOutputThread(const QString& Name, int w, int h) : NDISenderName(Name), width(w), height(h), isSending(false), Instance(nullptr),
	m_lastTS(TimeMilliSecond()),
	m_fps(0),
	m_seqNum(0)
{
	// Frame is a plain C struct: make its data pointer well defined so the
	// destructor can never free an indeterminate pointer.
	Frame.p_data = nullptr;
	// Fixed 4K UYVY -> 1080p UYVY downscaler used by run().
	m_scale = new VideoScale(3840, 2160, AV_PIX_FMT_UYVY422, 1920, 1080, AV_PIX_FMT_UYVY422);
}
// Tear down the NDI sender and the scaler.
// NOTE: Frame.p_data is deliberately NOT freed here — it is either never set
// or points at a buffer that run() has already released with av_free(), so
// the previous free(Frame.p_data) was a double free / foreign-allocator free.
NDIOutputThread::~NDIOutputThread()
{
	if (Instance != nullptr)
		NDIlib_send_destroy(Instance);
	delete m_scale;
}
// Set the advertised NDI source name (takes effect on the next Init()).
void NDIOutputThread::SetNDISenderName(const QString& Name)
{
	NDISenderName = Name;
}
// Current advertised NDI source name.
QString NDIOutputThread::GetNDISenderName() const
{
	return NDISenderName;
}
// Adopt a new nominal frame size; non-positive dimensions are ignored.
// Takes effect on the next Init().
void NDIOutputThread::SetNDIImageSize(int w, int h)
{
	if (w > 0 && h > 0)
	{
		width = w;
		height = h;
	}
}
// (Re)create the NDI sender instance and describe the frames we will send.
// Returns false when the configuration is unusable or creation fails.
bool NDIOutputThread::Init()
{
	if (!CheckValid())
		return false;
	if (Instance != nullptr)
	{
		// Recreating: drop the previous sender. Frame.p_data is not owned by
		// this object (run() allocates and frees each scaled frame), so the
		// old free(Frame.p_data) here was undefined behaviour and is removed.
		NDIlib_send_destroy(Instance);
		Instance = nullptr;
	}
	NDIlib_send_create_t NDI_Send_Create_Desc;
	// Keep the UTF-8 copy alive until NDIlib_send_create() has consumed it.
	std::string strname = NDISenderName.toStdString();
	NDI_Send_Create_Desc.p_ndi_name = strname.c_str();
	Instance = NDIlib_send_create(&NDI_Send_Create_Desc);
	if (!Instance) return false;
	// Describe the outgoing stream; p_data is filled per-frame in run().
	Frame.xres = width;
	Frame.yres = height;
	Frame.FourCC = NDIlib_FourCC_type_UYVY;
	Frame.line_stride_in_bytes = Frame.xres * 2;	// UYVY: 2 bytes per pixel
	Frame.frame_rate_D = 1;
	Frame.frame_rate_N = 25;	// 25 fps
	Frame.frame_format_type = NDIlib_frame_format_type_progressive;
	Frame.picture_aspect_ratio = 16.0 / 9;
	Frame.p_metadata = nullptr;
	return true;
}
void NDIOutputThread::run()
{
ComPtr<IDeckLinkVideoInputFrame> frame;
void* srcBuff;
uint8_t* dstBuff;
int dstBuffSize;
uint64_t currTime, deltaTime;
if (!Init())
return;
while (true)
{
if (taskQueue.WaitFor(frame))
{
frame->GetBytes(&srcBuff);
m_scale->scale((uint8_t*)srcBuff, 0, &dstBuff, &dstBuffSize);
Frame.p_data = dstBuff;
Frame.timestamp = ++m_seqNum;
NDIlib_send_send_video_v2(Instance, &Frame);
av_free(dstBuff);
taskQueue.Pop(frame);
}
m_fps++;
currTime = TimeMilliSecond();
deltaTime = currTime - m_lastTS;
if (deltaTime >= 1000)
{
qDebug() << GetCurrDateTimeStr() << " NDI ouptut fps " << m_fps << ", qsize " << taskQueue.Size() << "\n";
m_fps = 0;
m_lastTS = currTime;
}
}
}
#if 0
// NOTE(review): legacy Image-based sender loop, compiled out. Kept for
// reference; the active run() above consumes raw DeckLink frames instead.
void NDIOutputThread::run()
{
	if (!Init())
		return;
	while(true)
	{
		START_WAIT_TIME_COUNTER
		std::shared_ptr<Image> frame;
		if(taskQueue.WaitFor(frame))
		{
			END_WAIT_TIME_COUNTER
			START_TIME_COUNTER
			if(frame->IsValid())
			{
#if USE_4K
				// Scale 4K BGRA down to 1080p directly into the NDI frame buffer.
				libyuv::ARGBScale(frame->GetBytes(), 3840 << 2, 3840, 2160,
					Frame.p_data, 1920 << 2, 1920, 1080, libyuv::FilterMode::kFilterNone);
#else
				frame->Fill(Frame.p_data, Frame.xres * Frame.yres * 4);
#endif
				//qDebug() << "wgt-----------------------------"<<"ndi(" << NDISenderName << ")timestamp : " << frame->getInputFrameCurTimeStamp() - StartTimeStamp << "\n";
				//Frame.timestamp = frame->getInputFrameCurTimeStamp();
				Frame.timestamp = frame->getInputFrameSequence();
				NDIlib_send_send_video_v2(Instance, &Frame);
			}
			taskQueue.Pop(frame);
			END_TIME_COUNTER
		}
	}
}
#endif
// Drop all queued frames (e.g. on reconfiguration).
void NDIOutputThread::Clear()
{
	taskQueue.Reset();
}
// Slot: queue a captured DeckLink frame for the sender loop in run().
void NDIOutputThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> frame)
{
	START_SLOT_TIME_COUNTER
	//if (!frame->IsValid())
	//	return;
	//qDebug() << "NDIOutputThread::AddFrame\n";
	taskQueue.Push(frame);
	END_SLOT_TIME_COUNTER
}
#pragma once
#pragma once
#include <QThread>
#include <QMutex>
#include <QWaitCondition>
#include <queue>
#include "Processing.NDI.Advanced.h"
#include "Utils/Image.h"
#include "Utils/SampleQueue.h"
#include "Utils/VideoScale.h"
// QThread that downscales captured DeckLink frames and publishes them as an
// NDI video source. Frames are fed via the AddFrame slot and consumed by run().
class NDIOutputThread : public QThread
{
	Q_OBJECT
public:
	NDIOutputThread(const QString& Name, int w, int h);
	~NDIOutputThread();
	void SetNDISenderName(const QString& Name);
	QString GetNDISenderName() const;
	void SetNDIImageSize(int w, int h);
	int GetWidth() { return width; }
	int GetHeight() { return height; }
	bool IsSending() const { return isSending; }
	// Usable only with a positive size and a non-empty sender name.
	bool CheckValid() const { return width > 0 && height > 0 && !NDISenderName.isEmpty(); }
	// Drop all queued frames.
	void Clear();
public slots:
	// Queue a captured frame for sending.
	void AddFrame(ComPtr<IDeckLinkVideoInputFrame> frame);
private:
	// Private default ctor (unused externally); member-init order now matches
	// the declaration order below, and the in-class initializers guarantee
	// every member is well defined (m_scale/m_lastTS/m_fps/m_seqNum were
	// previously left uninitialized on this path).
	NDIOutputThread() : NDISenderName(""), width(-1), height(-1), isSending(false), Instance(nullptr) {}
	bool Init();
	virtual void run() override;
	QString NDISenderName;
	int width{ -1 };
	int height{ -1 };
	bool isSending{ false };
	SampleQueue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue;
	NDIlib_send_instance_t Instance{ nullptr };
	NDIlib_video_frame_v2_t Frame;		// p_data is managed by ctor/run(), never owned here
	VideoScale* m_scale{ nullptr };		// 4K->1080p UYVY downscaler
	uint64_t m_lastTS{ 0 };				// last fps-log timestamp (ms)
	int m_fps{ 0 };						// frames sent since last log
	uint64_t m_seqNum{ 0 };				// outgoing frame sequence number
};
\ No newline at end of file
#include "Threads/ProcessThread.h"
#include "Threads/ProcessThread.h"
#include "opencv2/opencv.hpp"
#include "libyuv.h"
#include "Utils//Common.h"
#define CUTBUFFERMAXSIZE 125*2
int ProcessThread::s_count = 0;
extern int OutputDeleyTime;
extern int FrameRate;
extern int FrontDeleyTime;
extern int BlackBottomHeight;
//extern int ScaleMode;
extern std::map<qint32, qint32> map_output_delay;
extern std::map<qint32, qint32> map_scale_mode;
// Per-output cropping worker. Each instance gets a sequential index; its
// output delay is looked up from the global per-output configuration map.
ProcessThread::ProcessThread()
	: sendFrames(0),
	sendStartTime(QDateTime::currentMSecsSinceEpoch()),
	recvROIs(0),
	continuousLostNums(0),
	recvROIStartTime(QDateTime::currentMSecsSinceEpoch()),
	m_fps(0),
	m_lastRecvTS(TimeMilliSecond())
{
	// SetUpUDP() may never be called; keep the pointer well defined so the
	// destructor can safely test it (previously it was indeterminate).
	udpSocket = nullptr;
	// Defensive: cutting has not reached its start threshold yet.
	firstMinSize = false;
	idx = s_count++;	// 0-based instance index; config maps are keyed 1-based
	int key = idx + 1;
	output_deley_time = map_output_delay[key];
	// Frames buffered before cutting starts: front delay (s) * frame rate.
	minTaskImageQueueSize = FrontDeleyTime * FrameRate;
}
// Tear down the UDP socket if SetUpUDP() ever created one; previously a
// never-initialized udpSocket was dereferenced here.
ProcessThread::~ProcessThread()
{
	if (udpSocket != nullptr)
	{
		udpSocket->close();
		udpSocket->deleteLater();
	}
}
// Create (or re-create) the UDP socket that receives ROI datagrams.
// A previous socket is released first so repeated calls do not leak.
void ProcessThread::SetUpUDP(const QString hostAddr, const QString hostPort)
{
	if (udpSocket != nullptr)
	{
		udpSocket->close();
		udpSocket->deleteLater();
	}
	udpSocket = new QUdpSocket(this);
	udpSocket->bind(QHostAddress(hostAddr), hostPort.toInt());
	// DirectConnection: ReadDatagrams runs on the thread that emits readyRead.
	connect(udpSocket, SIGNAL(readyRead()), this, SLOT(ReadDatagrams()), Qt::DirectConnection);
}
// Slot: enqueue a captured frame for cropping. Once the queue has buffered
// enough frames (front-delay window), cutting is allowed to start.
void ProcessThread::AddFrame(std::shared_ptr<Image> image)
{
	START_SLOT_TIME_COUNTER
	if (image->IsValid()) {
		taskImageQueue.Push(image);
		// Latch: flips to true once and stays true.
		if (taskImageQueue.Size() >= minTaskImageQueueSize && !firstMinSize)
		{
			firstMinSize = true;
		}
	}
	END_SLOT_TIME_COUNTER
}
// Slot (alternate variant, currently unused): drain every pending datagram,
// keeping only the newest payload (latest ROI wins), and queue it as a ROI
// rule. Unlike ReadDatagrams() it does not update receive-rate statistics.
void ProcessThread::ReadDatagramsNew()
{
	START_SLOT_TIME_COUNTER
	QHostAddress inClientAddr;
	quint16 inClientPort;
	QByteArray data;
	data.clear();
	while (udpSocket->hasPendingDatagrams())
	{
		// Each read overwrites 'data': only the last datagram survives.
		data.resize(udpSocket->pendingDatagramSize());
		udpSocket->readDatagram(data.data(), data.size(), &inClientAddr, &inClientPort);
		//qDebug()<< "recv time:" << QDateTime::currentMSecsSinceEpoch() << "\t" << "recv udp data:" << idx << "\t" << data.data() << "\n";
	}
	//qDebug() << "read udp data thread id:" << QThread::currentThreadId() << "\n";
	if (!data.isEmpty())
	{
		taskROIQueue.Push(RoiMessage(data));
		/*recvROIs++;
		int64_t elapse = QDateTime::currentMSecsSinceEpoch() - recvROIStartTime;
		if (elapse >= 5 * 1000) {
			float val = recvROIs * 1000.0 / elapse;
			qDebug() << fixed << qSetRealNumberPrecision(2) << "PPProcessThread::run::recvROIs::" << idx << "\t" << val << "\n";
			recvROIs = 0;
			recvROIStartTime = QDateTime::currentMSecsSinceEpoch();
		}*/
	}
	END_SLOT_TIME_COUNTER
}
// Slot (active, see SetUpUDP): drain every pending datagram, keep only the
// newest payload, queue it as a ROI rule, and log the ROI receive rate every
// five seconds.
void ProcessThread::ReadDatagrams()
{
	START_SLOT_TIME_COUNTER
	QHostAddress inClientAddr;
	quint16 inClientPort;
	QByteArray data;
	data.clear();
	while(udpSocket->hasPendingDatagrams())
	{
		// Each read overwrites 'data': only the last datagram survives.
		data.resize(udpSocket->pendingDatagramSize());
		udpSocket->readDatagram(data.data(), data.size(), &inClientAddr, &inClientPort);
		/*if (!data.isEmpty())
		{
			RoiMessage msg = RoiMessage(data);
			qDebug() << "recv time:" << QDateTime::currentMSecsSinceEpoch() << "\t" << "recv udp data:" << idx << "\t" << data.data() << "chazhi:" << QDateTime::currentMSecsSinceEpoch() - msg.Timecode()<< "\n";
		}*/
	}
	//qDebug() << "read udp data thread id:" << QThread::currentThreadId() << "\n";
	if(!data.isEmpty())
	{
		taskROIQueue.Push(RoiMessage(data));
		recvROIs++;
		int64_t elapse = QDateTime::currentMSecsSinceEpoch() - recvROIStartTime;
		if (elapse >= 5 * 1000) {
			float val = recvROIs * 1000.0 / elapse;
			qDebug() << fixed << qSetRealNumberPrecision(2) << "PPProcessThread::run::recvROIs::" << idx << "\t" << val << "\n";
			recvROIs = 0;
			recvROIStartTime = QDateTime::currentMSecsSinceEpoch();
		}
	}
	END_SLOT_TIME_COUNTER
}
void ProcessThread::cutRunFrontFixedRegion()
{
RoiMessage roiMsg(560, 0, 800, 1080);
while (true)
{
std::shared_ptr<Image> image = nullptr;
if (taskImageQueue.WaitFor()) {
if (taskImageQueue.Front(image))
{
if (!image)
{
taskImageQueue.Pop();
continue;
}
WorkCutImage(image, roiMsg);
taskImageQueue.Pop();
}
}
}
}
// Main cutting loop (front-delay variant): hold back a window of frames,
// collect ROI rules (keyed by capture sequence number) arriving over UDP,
// and crop each frame with the exact, interpolated, or last-known rule.
void ProcessThread::cutRunFront()
{
	bool continue_flag = false;
	// Frames to buffer before cutting starts: front delay (s) * fps.
	int min_size = FrontDeleyTime * FrameRate;
	while (true)
	{
		std::shared_ptr<Image> image = nullptr;
		if (taskImageQueue.WaitFor() && taskImageQueue.Size() >= min_size)// landscape frames are available
		//if (taskImageQueue.WaitFor() && firstMinSize)
		{
			// Drain pending ROI messages into the rule map, keyed by timecode
			// (which carries the matching frame's sequence number).
			while (taskROIQueue.Size()) // ROI messages available
			{
				RoiMessage roi;
				taskROIQueue.Pop(roi);
				cutRuleMap[roi.Timecode()] = roi;
			}
			if (taskImageQueue.Front(image))
			{
				if (!image)
				{
					taskImageQueue.Pop();
					continue;
				}
				auto timestamp = image->getInputFrameCurTimeStamp();
				auto sequence = image->getInputFrameSequence();
				//qDebug() << "idx:" << idx << " ,current raw data:" << sequence << "\n";
				if (cutRuleMap.size())
				{
					// Case 1: exact rule for this frame's sequence number.
					auto itor = cutRuleMap.find(sequence);
					if (itor != cutRuleMap.end())
					{
						//qDebug() << "idx:" << idx << "find rule,sequence: " << sequence << "\n";
						lastReceiveMessage = itor->second;
						WorkCutImage(image, lastReceiveMessage);
						taskImageQueue.Pop();
					}
					else
					{
						// Case 2: frame predates all buffered rules — use the oldest.
						if (sequence < cutRuleMap.begin()->first)
						{
							qDebug() << "idx:" << idx << " sequence: " << sequence << " too small " << "\n";
							lastReceiveMessage = cutRuleMap.begin()->second;
							WorkCutImage(image, lastReceiveMessage);
							taskImageQueue.Pop();
						}
						else
						{
							auto end = cutRuleMap.end(); end--;
							// Case 3: rule for this sequence was lost but newer rules
							// exist — interpolate X between the surrounding rules.
							if (end->second.Timecode() > sequence)
							{
								auto upper_itor = cutRuleMap.upper_bound(sequence);
								qDebug() << "idx:" << idx << " sequence: " << sequence << " losted cut rule ..... and find upper sequence:"<<upper_itor->second.Timecode() << "\n";
								auto roimsg = upper_itor->second;
								if (upper_itor != cutRuleMap.begin())
								{
									auto tmp_itor = upper_itor;
									--tmp_itor;
									auto roi_front = tmp_itor->second;
									// Average the X of the previous and next rules.
									int add = roimsg.X() + roi_front.X();
									lastReceiveMessage.SetX(add / 2);
									cutRuleMap[sequence] = lastReceiveMessage;
								}
								else lastReceiveMessage = roimsg;
								WorkCutImage(image, lastReceiveMessage);
								taskImageQueue.Pop();
								/*for (auto itor = cutRuleMap.begin(); itor != cutRuleMap.end(); itor++)
								{
									if (itor->first > sequence)
									{
										auto roimsg = itor->second;
										if (itor != cutRuleMap.begin())
										{
											auto tmp_itor = itor;
											--tmp_itor;
											auto roi_front = tmp_itor->second;
											int add = roimsg.X() + roi_front.X();
											lastReceiveMessage.SetX(add / 2);
										}
										else lastReceiveMessage = roimsg;
										WorkCutImage(image, lastReceiveMessage);
										taskImageQueue.Pop();
										break;
									}
								}*/
								continue;
							}
							else
							{
								// Case 4: no rule as new as this frame yet — reuse the
								// last rule and restart rule collection.
								qDebug() << "idx:" << idx << " sequence: " << sequence << " use last cut rule..... " << "\n";
								//if (!taskROIQueue.WaitUntil(200))
								//{
								//	//qDebug() << "idx:" << idx << " wait 200ms ,and not recv data!!!!!!!!!!!!" << endl;
								//	WorkCutImage(image, lastReceiveMessage);
								//	taskImageQueue.Pop();
								//	cutRuleMap.clear();
								//}
								WorkCutImage(image, lastReceiveMessage);
								taskImageQueue.Pop();
								cutRuleMap.clear();
								continue;
							}
						}
					}
				}
				else
				{
					// No rules at all: fall back to the last known ROI.
					WorkCutImage(image, lastReceiveMessage);
					taskImageQueue.Pop();
				}
			}
		}
		/*else
		{
			qDebug() << "idx:" << idx << "taskImageQueue size:"<< taskImageQueue.Size() <<",min_size:"<< min_size << endl;
		}*/
		// Overflow protection: drop frames beyond the configured delay window.
		// NOTE(review): the logged "lost Image size" omits the FrameRate factor
		// used in the loop condition — the log value looks inconsistent.
		while (taskImageQueue.Size() > (FrameRate * (output_deley_time / 1000 + FrontDeleyTime)))
		{
			qDebug() << "idx:" << idx << ",----------------lost Image size:" << taskImageQueue.Size() - (output_deley_time / 1000 + FrontDeleyTime) << endl;
			taskImageQueue.Pop(image);
		}
		// Trim rules older than the current frame once the map exceeds its cap.
		if (cutRuleMap.size() > CUTBUFFERMAXSIZE && image)
		{
			//qDebug() << " idx:" << idx << "rule map start clear data,more size:"<< cutRuleMap.size() - 125 << "\n";
			for (auto it = cutRuleMap.begin(); it != cutRuleMap.end();)
			{
				if (it->first < image->getInputFrameSequence() && cutRuleMap.size() > 5)
				{
					cutRuleMap.erase(it++);
				}
				else
				{
					++it;
				}
			}
		}
	}
}
// Legacy cutting loop (currently unused; run() calls cutRunFront instead).
// Same idea as cutRunFront but keyed by capture timestamp rather than
// sequence number, and it waits up to 200 ms for late rules.
void ProcessThread::cutRun()
{
	bool continue_flag = false;
	while (true)
	{
		std::shared_ptr<Image> image = nullptr;
		if (taskImageQueue.WaitFor())// landscape frames are available
		{
			// Drain pending ROI messages into the rule map keyed by timecode.
			while (taskROIQueue.Size()) // ROI messages available
			{
				RoiMessage roi;
				taskROIQueue.Pop(roi);
				cutRuleMap[roi.Timecode()] = roi;
			}
			if (taskImageQueue.Front(image))
			{
				qDebug() << "idx:" <<idx << " ,current raw data:"<<image->getInputFrameCurTimeStamp() << "\n";
				if (!image)
				{
					taskImageQueue.Pop();
					continue;
				}
				if (cutRuleMap.size())
				{
					// Case 1: exact rule for this frame's timestamp.
					auto itor = cutRuleMap.find(image->getInputFrameCurTimeStamp());
					if (itor != cutRuleMap.end())
					{
						qDebug() << "idx:" << idx << "find rule,timecode: "<<image->getInputFrameCurTimeStamp() << "\n";
						lastReceiveMessage = itor->second;
						WorkCutImage(image, lastReceiveMessage);
						taskImageQueue.Pop();
					}
					else
					{
						// Case 2: frame predates all buffered rules — use the oldest.
						if (image->getInputFrameCurTimeStamp() < cutRuleMap.begin()->first)
						{
							qDebug() << "idx:" << idx << " timecode: " << image->getInputFrameCurTimeStamp()<<" too small " << "\n";
							lastReceiveMessage = cutRuleMap.begin()->second;
							WorkCutImage(image, lastReceiveMessage);
							taskImageQueue.Pop();
						}
						else
						{
							auto end = cutRuleMap.end(); end--;
							// Case 3: the rule was lost but newer rules exist —
							// average X between the surrounding rules.
							if (end->second.Timecode() > image->getInputFrameCurTimeStamp())
							{
								qDebug() << "idx:" << idx << " timecode: " << image->getInputFrameCurTimeStamp() << " do lost frame ..... " << "\n";
								for (auto itor = cutRuleMap.begin(); itor != cutRuleMap.end(); itor++)
								{
									if (itor->first > image->getInputFrameCurTimeStamp())
									{
										auto roimsg = itor->second;
										if (itor != cutRuleMap.begin())
										{
											auto tmp_itor = itor;
											--tmp_itor;
											auto roi_front = tmp_itor->second;
											int add = roimsg.X() + roi_front.X();
											lastReceiveMessage.SetX(add / 2);
										}
										else lastReceiveMessage = roimsg;
										WorkCutImage(image, lastReceiveMessage);
										taskImageQueue.Pop();
										break;
									}
								}
								continue;
							}
							else
							{
								// Case 4: no rule as new as this frame — wait briefly
								// for one; on timeout fall back to the last rule.
								qDebug() << "idx:" << idx << " timecode: " << image->getInputFrameCurTimeStamp() << " wait cut rule..... " << "\n";
								if (!taskROIQueue.WaitUntil(200))
								{
									qDebug() << "idx:" << idx << " wait 200ms ,and not recv data!!!!!!!!!!!!" << endl;
									WorkCutImage(image, lastReceiveMessage);
									taskImageQueue.Pop();
									cutRuleMap.clear();
								}
								continue;
							}
						}
						//if (continue_flag)
						//{
						//	continue_flag = false;
						//	continue;
						//}
						//if (image->getInputFrameCurTimeStamp() < cutRuleMap.begin()->first)
						//{
						//	WorkCutImage(image, lastReceiveMessage);
						//	taskImageQueue.Pop();
						//}
						//else
						//{
						//	//taskROIQueue.WaitFor();
						//	taskROIQueue.WaitUntil(50);
						//	continue;
						//}
					}
				}
				else
				{
					// No rules at all: fall back to the last known ROI.
					WorkCutImage(image, lastReceiveMessage);
					taskImageQueue.Pop();
				}
			}
		}
		// Overflow protection: drop frames beyond the configured delay window.
		while (taskImageQueue.Size() > (FrameRate * (output_deley_time / 1000)))
		{
			qDebug() << "idx:" << idx << ",----------------lost Image size:" << taskImageQueue.Size() - (FrameRate * output_deley_time / 1000) << endl;
			taskImageQueue.Pop(image);
			//qDebug() << "idx:" << idx << ",lost timestamp:" << image->getInputFrameCurTimeStamp() << "\n";
			/*auto find_itor = cutRuleMap.find(image->getInputFrameCurTimeStamp());
			if (find_itor == cutRuleMap.end())
			{
				auto end = cutRuleMap.end(); end--;
				if (end->second.Timecode() > image->getInputFrameCurTimeStamp())
				{
					for (auto itor = cutRuleMap.begin(); itor != cutRuleMap.end(); itor++)
					{
						if (itor->first > image->getInputFrameCurTimeStamp())
						{
							auto roimsg = itor->second;
							if (itor != cutRuleMap.begin())
							{
								auto tmp_itor = itor;
								--tmp_itor;
								auto roi_front = tmp_itor->second;
								int add = roimsg.X() + roi_front.X();
								lastReceiveMessage.SetX(add / 2);
							}
							else lastReceiveMessage = roimsg;
							WorkCutImage(image, lastReceiveMessage);
							taskImageQueue.Pop();
							break;
						}
					}
				}
				else
				{
					lastReceiveMessage = find_itor->second;
					WorkCutImage(image, lastReceiveMessage);
				}
			}
			else
			{
				WorkCutImage(image, lastReceiveMessage);
			}*/
		}
		// Trim rules older than the current frame once the map exceeds 125 entries.
		if(cutRuleMap.size() > 125)
		{
			//qDebug() << " idx:" << idx << "rule map start clear data,more size:"<< cutRuleMap.size() - 125 << "\n";
			for (auto it = cutRuleMap.begin(); it != cutRuleMap.end();)
			{
				if (it->first < image->getInputFrameCurTimeStamp() && cutRuleMap.size() > 5)
				{
					cutRuleMap.erase(it++);
				}
				else
				{
					++it;
				}
			}
		}
	}
}
// Thread entry point: the front-delay cutting strategy is the active one;
// the commented alternatives are kept for experimentation.
void ProcessThread::run()
{
	//cutRun();
	cutRunFront();
	//cutRunFrontFixedRegion();
}
// Crop a vertical ROI out of a landscape frame, rotate it 90° clockwise,
// scale it to 1440x1080 and center it in a 1920x1080 BGRA canvas, then emit
// the result via PushFrame. The active path is selected at compile time
// (USE_4K / USE_1080P / 720p fallback).
void ProcessThread::WorkCutImage(std::shared_ptr<Image>& pImage, RoiMessage& roi)
{
	//roi = lastReceiveMessage;
	//START_TIME_COUNTER_BASE(OpenCV)
	/*cv::Rect cvroi(roi.X(), roi.Y(), roi.Width(), roi.Height());
	cv::Mat mat = image->GetMat().clone();
	cv::UMat umat4Image(mat.rows, mat.cols, CV_8UC3);
	cv::UMat umat4RotatedImage(810, 1080, CV_8UC3);
	cv::cvtColor(mat, umat4Image, cv::COLOR_BGRA2BGR);
	cv::UMat umat4ClippedImage = umat4Image(cvroi);
	cv::rotate(umat4ClippedImage, umat4RotatedImage, cv::ROTATE_90_CLOCKWISE);
	cv::UMat umat4ResizedImage;
	cv::resize(umat4RotatedImage, umat4ResizedImage, cv::Size(1440, 1080));
	cv::UMat umat4FinalImage = cv::UMat::zeros(cv::Size(1920, 1080), CV_8UC3);
	umat4ResizedImage.copyTo(umat4FinalImage(cv::Rect(240, 0, 1440, 1080)));
	cv::Mat finalmat;
	cv::cvtColor(umat4FinalImage, finalmat, cv::COLOR_BGR2BGRA);
	image->SetMat(finalmat);*/
	/*std::string tag = "ProcessThread::run::start::" + std::to_string(idx);
	const char* tags = tag.c_str();*/
	//PRINT_CURR_TIME(tags);
	//if (taskImageQueue.Size() >= 4)
	//	qDebug() << "ProcessThread::run::qsize::" << idx << "\t" << taskImageQueue.Size() << "\n";
	//qDebug() << "ROI- " << idx << " " << roi.X() << " " << roi.Y() << " " << roi.Width() << " " << roi.Height() << "\n";
#if USE_4K
	// ROI coordinates are doubled (<<1) here — presumably the tracker sends
	// 1080p-based coordinates that are scaled up to the 4K source. TODO confirm.
	// <<2 is the 4-bytes-per-ARGB-pixel factor.
	size_t size = roi.Width() * roi.Height() << 2 << 2;
	uint8_t* buff1 = new uint8_t[size];
	// Convert + rotate the cropped region 90° CW in one libyuv call; after
	// rotation the destination stride is the ROI height.
	libyuv::ConvertToARGB(pImage->GetBytes(), (3840 * 2160 << 2), buff1, (roi.Height() << 2 << 1),
		roi.X() << 1, roi.Y() << 1, 3840, 2160, roi.Width() << 1, roi.Height() << 1,
		libyuv::kRotate90, libyuv::FOURCC_ARGB);
	uint8_t* buff2 = new uint8_t[1440 * 1080 << 2];
	libyuv::ARGBScale(buff1, (roi.Height() << 2 << 1), roi.Height() << 1, roi.Width() << 1,
		buff2, 1440 << 2, 1440, 1080, libyuv::FilterMode::kFilterNone);
	// av_malloc: ownership passes to AVBuff inside the emitted Image.
	// NOTE(review): buff3 is not cleared, so the 1440..1920 column band
	// outside the copied region holds uninitialized bytes — confirm whether
	// downstream expects black borders there.
	uint8_t* buff3 = (uint8_t*)av_malloc(1920 * 1080 << 2);
	libyuv::ARGBCopy(buff2, 1440 << 2, (buff3 + BlackBottomHeight * 4), 1920 << 2, 1440, 1080);
	delete[] buff1;
	delete[] buff2;
	// Propagate the source frame's timestamp and sequence to the cropped image.
	std::shared_ptr<Image> img = std::make_shared<Image>(
		std::make_shared<AVBuff>(buff3), 1920, 1080, bmdFormat8BitBGRA,
		pImage->getInputFrameCurTimeStamp(), pImage->getInputFrameSequence());
	emit PushFrame(img);
	// Once-per-second crop throughput log.
	uint64_t currTime, deltaTime;
	int qsize;
	m_fps++;
	qsize = taskImageQueue.Size();
	currTime = TimeMilliSecond();
	deltaTime = currTime - m_lastRecvTS;
	if (deltaTime >= 1000)
	{
		qDebug() << GetCurrDateTimeStr() << " crop fps " << m_fps << ", qsize " << qsize << "\n";
		m_fps = 0;
		m_lastRecvTS = currTime;
	}
#elif USE_1080P
	//qDebug() << "wgt------------idx:" << idx << ",cut image of timestamp : " << pImage->getInputFrameCurTimeStamp() << ", cur image of x : " << roi.X() << "\n";
	size_t size = roi.Width() * roi.Height() << 2;
	uint8_t* buff1 = new uint8_t[size];
	libyuv::ConvertToARGB(pImage->GetBytes(), (1920 * 1080 << 2), buff1, (roi.Height() << 2),
		roi.X(), roi.Y(), 1920, 1080, roi.Width(), roi.Height(),
		libyuv::kRotate90, libyuv::FOURCC_ARGB);
	// Per-output scale filter mode from the global configuration (1-based key).
	int key = idx + 1;
	int scale_mode = map_scale_mode[key];
	uint8_t* buff2 = new uint8_t[1440 * 1080 << 2];
	libyuv::ARGBScale(buff1, (roi.Height() << 2), roi.Height(), roi.Width(),
		buff2, 1440 << 2, 1440, 1080, libyuv::FilterMode(scale_mode));
	uint8_t* buff3 = new uint8_t[1920 * 1080 << 2];
	memset(buff3, 0, (1920 * 1080 << 2));
	libyuv::ARGBCopy(buff2, 1440 << 2, (buff3 + BlackBottomHeight * 4), 1920 << 2, 1440, 1080);
	// NOTE(review): Image::SetMat is commented out in the current header —
	// this branch likely no longer compiles; verify before enabling USE_1080P.
	cv::Mat bgra = cv::Mat(1080, 1920, CV_8UC4, buff3);
	pImage->SetMat(bgra);
	delete[] buff1;
	delete[] buff2;
	delete[] buff3;
	//image->GetMatByRoi(cvroi, mat);
	//mat.copyTo(umat4Image);
	//cv::rotate(umat4Image, umat4RotatedImage, cv::ROTATE_90_CLOCKWISE);
	//cv::resize(umat4RotatedImage, umat4FinalImage, cv::Size(1920, 1080));
	//umat4FinalImage.copyTo(mat);
	//END_TIME_COUNTER_BASE(OpenCV)
	/*tag = "ProcessThread::run::end::" + std::to_string(idx);
	tags = tag.c_str();*/
	//PRINT_CURR_TIME(tags);
	//qDebug() << "ProcessThread image current time222:" << image->getInputFrameCurTimeStamp() << "\n";
	emit PushFrame(pImage);
	//DEBUG_FUNCTION("taskImageQueue Size: ", taskImageQueue.Size())
	sendFrames++;
	int elapse = (QDateTime::currentMSecsSinceEpoch() - sendStartTime) / 1000;
	if (elapse >= 2) {
		int fps = sendFrames / elapse;
		qDebug() << "ProcessThread::run::fps::" << idx << "\t" << fps << "\n";
		sendFrames = 0;
		sendStartTime = QDateTime::currentMSecsSinceEpoch();
	}
#else
	WorkCutImage720p(pImage,roi);
#endif
}
// 720p cropping variant. The body is currently compiled out (#if 0), so this
// function is a no-op; the code is kept for reference.
void ProcessThread::WorkCutImage720p(std::shared_ptr<Image>& pImage, RoiMessage& roi)
{
#if 0
	size_t size = roi.Width() * roi.Height() << 2;
	uint8_t* buff1 = new uint8_t[size];
	libyuv::ConvertToARGB(pImage->GetBytes(), (1920 * 1080 << 2), buff1, (roi.Height() << 2),
		roi.X(), roi.Y(), 1920, 1080, roi.Width(), roi.Height(),
		libyuv::kRotate90, libyuv::FOURCC_ARGB);
	/*uint8_t* buff2 = new uint8_t[1440 * 1080 << 2];
	libyuv::ARGBScale(buff1, (roi.Height() << 2), roi.Height(), roi.Width(),
		buff2, 1440 << 2, 1440, 1080, libyuv::FilterMode::kFilterNone);*/
	uint8_t* buff3 = new uint8_t[1280 * 720 << 2];
	memset(buff3, 0, (1280 * 720 << 2));
	libyuv::ARGBCopy(buff1, (roi.Height() << 2), (buff3 + 100 * 4), 1280 << 2, roi.Height(), roi.Width());
	cv::Mat bgra = cv::Mat(720, 1280, CV_8UC4, buff3);
	pImage->SetMat(bgra);
	delete[] buff1;
	//delete[] buff2;
	delete[] buff3;
	emit PushFrame(pImage);
#endif
}
// Drop all queued ROI messages and frames (e.g. on restart).
void ProcessThread::Clear()
{
	taskROIQueue.Reset();
	taskImageQueue.Reset();
}
#pragma once
#pragma once
#include <QThread>
#include <QMutex>
#include <QWaitCondition>
#include <QUdpSocket>
#include <QJsonDocument>
#include <map>
#include "Utils/SampleQueue.h"
#include "Utils/Image.h"
const QString MODE_CLEAR = "no_mode";
const QString MODE_CROP = "crop_roi";
const QString MODE_STOP = "stop";
const QString MODE_ACK = "checked_ok";
#define CROP1080WIDTH 810
#define CROP720WIDTH 720
#define CROPHEIGHT 1080
class RoiMessage
{
public:
#if USE_1080P
RoiMessage() : w(CROP1080WIDTH), h(CROPHEIGHT)
{
x = 1920 / 2 - w / 2;
y = 1080 / 2 - h / 2;
timecode = 0;
}
#else
RoiMessage() : w(CROP720WIDTH), h(CROPHEIGHT)
{
x = 1920 / 2 - w / 2;
y = 1080 / 2 - h / 2;
timecode = 0;
}
#endif
RoiMessage(QByteArray& data)
{
QJsonDocument document = QJsonDocument::fromJson(data);
QJsonObject object;
if (document.isObject())
{
object = document.object();
mode = object.value("signal").toString();
QJsonArray roi = object.value("roi").toArray();
int minx = roi[0].toInt();
int miny = roi[1].toInt();
int maxx = roi[2].toInt();
int maxy = roi[3].toInt();
id = object.value("id").toInt();
width = object.value("width").toInt();
height = object.value("height").toInt();
timecode = QString::number(object.value("timecode").toDouble(), 'f', 0).toLongLong();
w = maxx - minx;
h = maxy - miny;
x = minx;
y = miny;
}
}
RoiMessage(QByteArray&& data)
{
QJsonDocument document = QJsonDocument::fromJson(data);
QJsonObject object;
if (document.isObject())
{
object = document.object();
mode = object.value("signal").toString();
QJsonArray roi = object.value("roi").toArray();
int minx = roi[0].toInt();
int miny = roi[1].toInt();
int maxx = roi[2].toInt();
int maxy = roi[3].toInt();
id = object.value("id").toInt();
width = object.value("width").toInt();
height = object.value("height").toInt();
timecode = QString::number(object.value("timecode").toDouble(), 'f', 0).toLongLong();
w = maxx - minx;
h = maxy - miny;
x = minx;
y = miny;
}
}
RoiMessage(const RoiMessage& other) : x(other.x), y(other.y), w(other.w), h(other.h), timecode(other.timecode)
{
}
RoiMessage(RoiMessage&& other)
{
x = other.x;
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
}
RoiMessage operator=(const RoiMessage& other)
{
x = other.x;
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
return *this;
}
RoiMessage operator=(RoiMessage&& other)
{
x = other.x;
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
return *this;
}
RoiMessage(int X, int Y, int W, int H)
:x(X),
y(Y),
w(W),
h(H),
timecode(0) {}
bool IsValid()
{
return x > 0 && y > 0 && w > 0 && h > 0;
}
void SetX(int x_)
{
this->x = x_;
}
int X() { return x; }
int Y() { return y; }
int Width() { return w; }
int Height() { return h; }
qint64 Timecode() { return timecode; }
private:
int x;
int y;
int w;
int h;
QString mode;
qint64 timecode;
int id = 0;
int width;
int height;
};
// Worker thread that pairs captured video frames with ROI messages received
// over UDP and emits the cropped frames downstream via PushFrame.
class ProcessThread : public QThread
{
	Q_OBJECT
public:
	ProcessThread();
	~ProcessThread();
	// Bind the UDP socket used to receive ROI datagrams from the detector.
	void SetUpUDP(const QString hostAddr, const QString hostPort);
public slots:
	// Queue a captured frame for processing.
	void AddFrame(std::shared_ptr<Image> image);
	// Drain pending ROI datagrams from the UDP socket (two protocol
	// variants; which one is connected is decided elsewhere).
	void ReadDatagrams();
	void ReadDatagramsNew();
signals:
	// Emitted with each processed (cropped) frame.
	void PushFrame(std::shared_ptr<Image> image);
protected:
	// QThread entry point: main processing loop.
	void run() override;
private:
	// Crop pImage according to roi (full-size and 720p variants).
	void WorkCutImage(std::shared_ptr<Image>& pImage, RoiMessage& roi);
	void WorkCutImage720p(std::shared_ptr<Image>& pImage, RoiMessage& roi);
	// Alternative processing loops invoked from run().
	void cutRun();
	void cutRunFront();
	void cutRunFrontFixedRegion();
private:
	//SampleQueue<Image> taskPrerollQueue;
	SampleQueue<std::shared_ptr<Image>> taskImageQueue;  // frames awaiting a matching ROI
	SampleQueue<RoiMessage> taskROIQueue;                // ROI messages awaiting a frame
	std::map<qint64, RoiMessage> cutRuleMap;             // ROI lookup keyed by frame sequence/timecode
	QUdpSocket* udpSocket;
	//
	uint32_t videoPrerollSize;
	RoiMessage lastReceiveMessage;  // last good ROI, reused when none arrives
	// Drop all queued frames and ROI messages.
	void Clear();
	static int s_count;             // instances created so far; used to assign idx
	int idx;                        // per-instance index (from s_count)
	int output_deley_time;
	// Throughput accounting for the send path.
	int sendFrames;
	int64_t sendStartTime;
	// Throughput accounting for received ROI messages.
	int recvROIs;
	int64_t recvROIStartTime;
	int continuousLostNums;  // NOTE(review): original comment was mojibake; presumably counts consecutive frames with no UDP ROI received — confirm
	int minTaskImageQueueSize;
	bool firstMinSize = false;
	// FPS counter state (m_lastRecvTS in milliseconds, see TimeMilliSecond()).
	int m_fps;
	uint64_t m_lastRecvTS;
};
\ No newline at end of file
diff --git a/MomentaMedia/MomentaMedia.vcxproj b/MomentaMedia/MomentaMedia.vcxproj
diff --git a/MomentaMedia/MomentaMedia.vcxproj b/MomentaMedia/MomentaMedia.vcxproj
index 91b8a19..9c2d082 100644
--- a/MomentaMedia/MomentaMedia.vcxproj
+++ b/MomentaMedia/MomentaMedia.vcxproj
@@ -70,11 +70,11 @@
</ItemDefinitionGroup>
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<ClCompile>
- <AdditionalIncludeDirectories>.\ThirdParty\libyuv\include;.\ThirdParty\OpenCV\include;.\ThirdParty\NewTek\include;.\ThirdParty\BlackmagicDesign\include;.\include;%(AdditionalIncludeDirectories);$(Qt_INCLUDEPATH_)</AdditionalIncludeDirectories>
+ <AdditionalIncludeDirectories>.\ThirdParty\ffmpeg-master-latest-win64-gpl-shared\include;.\ThirdParty\libyuv\include;.\ThirdParty\OpenCV\include;.\ThirdParty\NewTek\include;.\ThirdParty\BlackmagicDesign\include;.\include;%(AdditionalIncludeDirectories);$(Qt_INCLUDEPATH_)</AdditionalIncludeDirectories>
</ClCompile>
<Link>
- <AdditionalLibraryDirectories>.\ThirdParty\libyuv\lib;.\ThirdParty\OpenCV\x64\vc15\lib;.\ThirdParty\NewTek\lib\x64;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
- <AdditionalDependencies>Processing.NDI.Lib.Advanced.x64.lib;opencv_world460.lib;yuv.lib;jpeg.lib;%(AdditionalDependencies);$(Qt_LIBS_)</AdditionalDependencies>
+ <AdditionalLibraryDirectories>.\ThirdParty\ffmpeg-master-latest-win64-gpl-shared\lib;.\ThirdParty\libyuv\lib;.\ThirdParty\OpenCV\x64\vc15\lib;.\ThirdParty\NewTek\lib\x64;%(AdditionalLibraryDirectories)</AdditionalLibraryDirectories>
+ <AdditionalDependencies>Processing.NDI.Lib.Advanced.x64.lib;opencv_world460.lib;yuv.lib;jpeg.lib;avformat.lib;avcodec.lib;avutil.lib;swscale.lib;%(AdditionalDependencies);$(Qt_LIBS_)</AdditionalDependencies>
<UACExecutionLevel>RequireAdministrator</UACExecutionLevel>
</Link>
</ItemDefinitionGroup>
@@ -133,6 +133,7 @@
<QtMoc Include="include\Threads\ProcessThread.h" />
<ClInclude Include="include\Orbit.h" />
<QtMoc Include="include\Utils\AudioPacket.h" />
+ <ClInclude Include="include\Utils\AVBuffer.h" />
<ClInclude Include="include\Utils\Common.h" />
<ClInclude Include="include\Utils\DispatchQueue.h" />
<ClInclude Include="include\Utils\LatencyStatistics.h" />
@@ -145,6 +146,7 @@
<ClInclude Include="include\Utils\ComPtr.h" />
<ClInclude Include="include\Utils\CustomEvents.h" />
<ClInclude Include="include\Utils\SampleQueue.h" />
+ <ClInclude Include="include\Utils\VideoScale.h" />
<ClInclude Include="ThirdParty\BlackmagicDesign\include\DeckLinkAPI.h" />
<ClInclude Include="ThirdParty\BlackmagicDesign\include\DeckLinkAPIVersion.h" />
</ItemGroup>
@@ -177,10 +179,12 @@
<ClCompile Include="src\Threads\CaptureThread.cpp" />
<ClCompile Include="src\Threads\ProcessThread.cpp" />
<ClCompile Include="src\Utils\AudioPacket.cpp" />
+ <ClCompile Include="src\Utils\AVBuffer.cpp" />
<ClCompile Include="src\Utils\Common.cpp" />
<ClCompile Include="src\Utils\Image.cpp" />
<ClCompile Include="src\Utils\LatencyStatistics.cpp" />
<ClCompile Include="src\Utils\Platform.cpp" />
+ <ClCompile Include="src\Utils\VideoScale.cpp" />
<ClCompile Include="ThirdParty\BlackmagicDesign\include\DeckLinkAPI_i.c" />
</ItemGroup>
<ItemGroup>
diff --git a/MomentaMedia/MomentaMedia.vcxproj.filters b/MomentaMedia/MomentaMedia.vcxproj.filters
index 02f06e9..8b2e5b0 100644
--- a/MomentaMedia/MomentaMedia.vcxproj.filters
+++ b/MomentaMedia/MomentaMedia.vcxproj.filters
@@ -145,6 +145,12 @@
<ClInclude Include="include\Orbit.h">
<Filter>Header Files\Utils</Filter>
</ClInclude>
+ <ClInclude Include="include\Utils\VideoScale.h">
+ <Filter>Header Files</Filter>
+ </ClInclude>
+ <ClInclude Include="include\Utils\AVBuffer.h">
+ <Filter>Header Files\Utils</Filter>
+ </ClInclude>
</ItemGroup>
<ItemGroup>
<QtRcc Include="Form\MomentaMedia.qrc">
@@ -229,6 +235,12 @@
<ClCompile Include="src\Utils\AudioPacket.cpp">
<Filter>Source Files\Utils</Filter>
</ClCompile>
+ <ClCompile Include="src\Utils\VideoScale.cpp">
+ <Filter>Source Files</Filter>
+ </ClCompile>
+ <ClCompile Include="src\Utils\AVBuffer.cpp">
+ <Filter>Source Files\Utils</Filter>
+ </ClCompile>
</ItemGroup>
<ItemGroup>
<QtUic Include="Form\MomentaMedia.ui">
diff --git a/MomentaMedia/MomentaMedia.vcxproj.user b/MomentaMedia/MomentaMedia.vcxproj.user
index ea662a6..83ca65a 100644
--- a/MomentaMedia/MomentaMedia.vcxproj.user
+++ b/MomentaMedia/MomentaMedia.vcxproj.user
@@ -5,9 +5,9 @@
<DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
</PropertyGroup>
<PropertyGroup Label="QtSettings" Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
- <QtLastBackgroundBuild>2022-07-06T03:07:22.4867692Z</QtLastBackgroundBuild>
+ <QtLastBackgroundBuild>2022-12-17T16:53:58.1087817Z</QtLastBackgroundBuild>
</PropertyGroup>
<PropertyGroup Label="QtSettings" Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
- <QtLastBackgroundBuild>2022-07-06T03:07:24.4163747Z</QtLastBackgroundBuild>
+ <QtLastBackgroundBuild>2022-12-17T16:53:58.1607839Z</QtLastBackgroundBuild>
</PropertyGroup>
</Project>
\ No newline at end of file
diff --git a/MomentaMedia/include/BlackMagicDesign/DeckLinkInputDevice.h b/MomentaMedia/include/BlackMagicDesign/DeckLinkInputDevice.h
index d6dcbf7..025176b 100644
--- a/MomentaMedia/include/BlackMagicDesign/DeckLinkInputDevice.h
+++ b/MomentaMedia/include/BlackMagicDesign/DeckLinkInputDevice.h
@@ -15,6 +15,7 @@
#include "Utils/Image.h"
#include "Utils/AudioPacket.h"
#include "Threads/CaptureThread.h"
+#include "NDI/NDIOutputThread.h"
class DeckLinkInputDevice : public QObject, public IDeckLinkInputCallback
{
@@ -56,6 +57,10 @@ public:
void SetCapture(const std::shared_ptr<CaptureThread>& capture) {
Capture = capture;
}
+
+ void SetNDIOutputThread(const std::shared_ptr<NDIOutputThread>& ndiOuptutThread) {
+ NDIOutput = ndiOuptutThread;
+ }
//void ForwardThread();
signals:
@@ -78,6 +83,7 @@ private:
//
std::shared_ptr<CaptureThread> Capture;
+ std::shared_ptr<NDIOutputThread> NDIOutput;
//std::queue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue;
//std::condition_variable cv;
@@ -85,6 +91,11 @@ private:
//std::thread thd;
int Index;
+ bool m_pushed;
+ bool PrevInputSignalAbsent;
+
+ int m_fps;
+ uint64_t m_lastRecvTS;
};
diff --git a/MomentaMedia/include/BlackMagicDesign/DeckLinkInputPage.h b/MomentaMedia/include/BlackMagicDesign/DeckLinkInputPage.h
index 2345190..677c8a4 100644
--- a/MomentaMedia/include/BlackMagicDesign/DeckLinkInputPage.h
+++ b/MomentaMedia/include/BlackMagicDesign/DeckLinkInputPage.h
@@ -62,7 +62,7 @@ private:
DeckLinkOpenGLWidget* PreviewView;
std::shared_ptr<CaptureThread> Capture;
- std::unique_ptr<NDIOutputThread> NDIOutput;
+ std::shared_ptr<NDIOutputThread> NDIOutput;
QFormLayout* FormLayout;
QComboBox* DeviceListCombo;
diff --git a/MomentaMedia/include/BlackMagicDesign/DeckLinkOutputDevice.h b/MomentaMedia/include/BlackMagicDesign/DeckLinkOutputDevice.h
index 91cc1d4..9ad476c 100644
--- a/MomentaMedia/include/BlackMagicDesign/DeckLinkOutputDevice.h
+++ b/MomentaMedia/include/BlackMagicDesign/DeckLinkOutputDevice.h
@@ -105,4 +105,7 @@ private:
qint32 Device_id;
qint32 output_deley_ms;//
qint32 audio_max_size;
+
+ int m_fps;
+ uint64_t m_lastRecvTS;
};
diff --git a/MomentaMedia/include/BlackMagicDesign/DeckLinkOutputVideoFrame.h b/MomentaMedia/include/BlackMagicDesign/DeckLinkOutputVideoFrame.h
index 34c387d..5fa3021 100644
--- a/MomentaMedia/include/BlackMagicDesign/DeckLinkOutputVideoFrame.h
+++ b/MomentaMedia/include/BlackMagicDesign/DeckLinkOutputVideoFrame.h
@@ -9,11 +9,11 @@
class DeckLinkOutputVideoFrame : public IDeckLinkVideoFrame
{
public:
- DeckLinkOutputVideoFrame();
- DeckLinkOutputVideoFrame(int w, int h, BMDFrameFlags flags, BMDPixelFormat pixelFormat);
- DeckLinkOutputVideoFrame(Image& image);
+ //DeckLinkOutputVideoFrame();
+ //DeckLinkOutputVideoFrame(int w, int h, BMDFrameFlags flags, BMDPixelFormat pixelFormat);
+ //DeckLinkOutputVideoFrame(Image& image);
DeckLinkOutputVideoFrame(std::shared_ptr<Image> image);
- DeckLinkOutputVideoFrame(Image&& image);
+ //DeckLinkOutputVideoFrame(Image&& image);
virtual ~DeckLinkOutputVideoFrame();
// IDeckLinkVideoFrame interface
@@ -72,4 +72,6 @@ private:
BMDTimeValue outputFrameCompletedReferenceTime;
BMDOutputFrameCompletionResult outputFrameCompletionResult;
+
+ std::shared_ptr<Image> m_img;
};
diff --git a/MomentaMedia/include/NDI/NDIOutputThread.h b/MomentaMedia/include/NDI/NDIOutputThread.h
index 4453550..381074b 100644
--- a/MomentaMedia/include/NDI/NDIOutputThread.h
+++ b/MomentaMedia/include/NDI/NDIOutputThread.h
@@ -7,6 +7,7 @@
#include "Processing.NDI.Advanced.h"
#include "Utils/Image.h"
#include "Utils/SampleQueue.h"
+#include "Utils/VideoScale.h"
class NDIOutputThread : public QThread
{
@@ -30,7 +31,8 @@ public:
void Clear();
public slots:
- void AddFrame(std::shared_ptr<Image> frame);
+ //void AddFrame(std::shared_ptr<Image> frame);
+ void AddFrame(ComPtr<IDeckLinkVideoInputFrame> frame);
private:
NDIOutputThread() : NDISenderName(""), width(-1), height(-1), Instance(nullptr), isSending(false) {}
@@ -46,8 +48,15 @@ private:
bool isSending;
- SampleQueue<std::shared_ptr<Image>> taskQueue;
+ //SampleQueue<std::shared_ptr<Image>> taskQueue;
+ SampleQueue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue;
NDIlib_send_instance_t Instance;
NDIlib_video_frame_v2_t Frame;
+
+ VideoScale* m_scale;
+
+ uint64_t m_lastTS;
+ int m_fps;
+ uint64_t m_seqNum;
};
\ No newline at end of file
diff --git a/MomentaMedia/include/Threads/CaptureThread.h b/MomentaMedia/include/Threads/CaptureThread.h
index e794288..c584609 100644
--- a/MomentaMedia/include/Threads/CaptureThread.h
+++ b/MomentaMedia/include/Threads/CaptureThread.h
@@ -4,6 +4,7 @@
#include <QMutex>
#include "Utils/Image.h"
#include "Utils/SampleQueue.h"
+#include "Utils/VideoScale.h"
#include <condition_variable>
#include <memory>
@@ -45,4 +46,9 @@ private:
std::condition_variable cv;
std::mutex mutex;
qint64 sequenceNum = 0;
+
+ VideoScale* m_scale;
+
+ int m_fps;
+ uint64_t m_lastRecvTS;
};
\ No newline at end of file
diff --git a/MomentaMedia/include/Threads/ProcessThread.h b/MomentaMedia/include/Threads/ProcessThread.h
index 08986b1..30562e2 100644
--- a/MomentaMedia/include/Threads/ProcessThread.h
+++ b/MomentaMedia/include/Threads/ProcessThread.h
@@ -215,4 +215,6 @@ private:
int minTaskImageQueueSize;
bool firstMinSize = false;
+ int m_fps;
+ uint64_t m_lastRecvTS;
};
\ No newline at end of file
diff --git a/MomentaMedia/include/Utils/Common.h b/MomentaMedia/include/Utils/Common.h
index e75ec6c..f0a88b9 100644
--- a/MomentaMedia/include/Utils/Common.h
+++ b/MomentaMedia/include/Utils/Common.h
@@ -2,6 +2,7 @@
#include "DeckLinkAPI.h"
#include <QDateTime.h>
+#include <sys/timeb.h>
#define USE_4K 1
#define USE_1080P 1
@@ -17,3 +18,10 @@ static QString GetCurrDateTimeStr()
return as;
}
+
+static uint64_t TimeMilliSecond()
+{
+ timeb now;
+ ftime(&now);
+ return now.time * 1000 + now.millitm;
+}
\ No newline at end of file
diff --git a/MomentaMedia/include/Utils/Image.h b/MomentaMedia/include/Utils/Image.h
index 9f9f303..77fd7ff 100644
--- a/MomentaMedia/include/Utils/Image.h
+++ b/MomentaMedia/include/Utils/Image.h
@@ -1,41 +1,51 @@
#pragma once
#include <QtCore/QtCore>
+#include <memory>
#include "opencv2/core.hpp"
#include "opencv2/imgproc.hpp"
#include "DeckLinkAPI.h"
#include "BlackMagicDesign/DeckLinkInputVideoFrame.h"
+#include "Utils//AVBuffer.h"
class Image : public QObject
{
Q_OBJECT
public:
- Image();
- Image(IDeckLinkVideoInputFrame* videoFrame);
- Image(ComPtr<DeckLinkInputVideoFrame> videoFrame);
- Image(ComPtr<IDeckLinkVideoInputFrame> videoFrame,const qint64& curtimestamp,const qint64& sequence);
- Image(ComPtr<IDeckLinkVideoInputFrame> videoFrame);
- Image(const Image& other);
- Image(Image&& other);
+ Image() {}
+ //Image(IDeckLinkVideoInputFrame* videoFrame);
+ //Image(ComPtr<DeckLinkInputVideoFrame> videoFrame);
+ Image(std::shared_ptr<AVBuff> frame, int w, int h, BMDPixelFormat pixFmt, const qint64& curtimestamp, const qint64& sequence);
+ //Image(ComPtr<IDeckLinkVideoInputFrame> videoFrame);
+ Image(const Image& other)
+ {
+ m_frame = other.m_frame;
+ m_width = other.m_width;
+ m_height = other.m_height;
+ m_pixFmt = other.m_pixFmt;
+ inputFrameCurTimeStamp = other.inputFrameCurTimeStamp;
+ sequenceNum = other.sequenceNum;
+ };
+ //Image(Image&& other);
~Image();
- Image& operator=(const Image& other);
- Image& operator=(Image&& other);
+ //Image& operator=(const Image& other) { return *this; };
+ //Image& operator=(Image&& other);
- cv::Mat GetMat();
- void SetMat(cv::Mat& inMat);
+ //cv::Mat GetMat();
+ //void SetMat(cv::Mat& inMat);
uint8_t* GetBytes() const;
- int GetSize() const { return mat.rows * mat.cols * 4; }
+ //int GetSize() const { return mat.rows * mat.cols * 4; }
bool IsValid() const;
- int GetWidth() { return mat.cols; }
- int GetHegiht() { return mat.rows; }
- BMDPixelFormat GetPixelFormat() { return (BMDPixelFormat)bmdFormat8BitBGRA; }
+ int GetWidth() { return m_width; }
+ int GetHegiht() { return m_height; }
+ BMDPixelFormat GetPixelFormat() { return m_pixFmt; }
BMDFrameFlags GetFlags() { return bmdFrameFlagDefault; }
void setVideoStreamTime(const BMDTimeValue time) { videoStreamTime = time; }
@@ -56,19 +66,19 @@ public:
int64_t getInputFrameCurTimeStamp() { return inputFrameCurTimeStamp; }
int64_t getInputFrameSequence() { return sequenceNum; }
- void GetMatByRoi(cv::Rect roi, cv::Mat& mat);
+ //void GetMatByRoi(cv::Rect roi, cv::Mat& mat);
- void Fill(void* dst, int dstSize)
+ /*void Fill(void* dst, int dstSize)
{
void* buf;
if(inVideoFrame->GetBytes(&buf) == S_OK)
{
memcpy_s(dst, dstSize, buf, inVideoFrame->GetRowBytes() * inVideoFrame->GetHeight());
}
- }
+ }*/
private:
- cv::Mat mat;
- ComPtr<DeckLinkInputVideoFrame> inVideoFrame;
+ //cv::Mat mat;
+ //ComPtr<DeckLinkInputVideoFrame> inVideoFrame;
// Timecode
BMDTimeValue videoStreamTime;
@@ -84,6 +94,11 @@ private:
qint64 sequenceNum;//
BMDOutputFrameCompletionResult outputFrameCompletionResult;
+
+ int m_width;
+ int m_height;
+ BMDPixelFormat m_pixFmt;
+ std::shared_ptr<AVBuff> m_frame;
};
diff --git a/MomentaMedia/src/BlackMagicDesign/DeckLinkInputDevice.cpp b/MomentaMedia/src/BlackMagicDesign/DeckLinkInputDevice.cpp
index 32df632..86a4e20 100644
--- a/MomentaMedia/src/BlackMagicDesign/DeckLinkInputDevice.cpp
+++ b/MomentaMedia/src/BlackMagicDesign/DeckLinkInputDevice.cpp
@@ -20,7 +20,11 @@ DeckLinkInputDevice::DeckLinkInputDevice(QObject* parent, ComPtr<IDeckLink>& dev
bLastValidFrameStatus(false),
SupportedInputConnections(bmdVideoConnectionUnspecified),
SelectedInputConnection(bmdVideoConnectionUnspecified),
- Index(index)
+ Index(index),
+ m_pushed(true),
+ PrevInputSignalAbsent(false),
+ m_fps(0),
+ m_lastRecvTS(TimeMilliSecond())
{
//thd = std::thread(&DeckLinkInputDevice::ForwardThread, this);
}
@@ -83,10 +87,46 @@ HRESULT DeckLinkInputDevice::VideoInputFrameArrived(IDeckLinkVideoInputFrame* vi
//emit ArrivedFrame(frame);
if (videoFrame && Capture) {
unsigned flags = videoFrame->GetFlags();
- if (flags & bmdFrameHasNoInputSource)
- qDebug() << GetCurrDateTimeStr() << "index: " << Index << " DeckLinkInputDevice get video frame No input source " << hex << flags << " ------------ \n";
- auto cur_time = QDateTime::currentMSecsSinceEpoch();
- Capture->AddFrame(frame, cur_time);
+ bool noInputSourceFlag = false;
+ if (flags & bmdFrameHasNoInputSource) {
+ //qDebug() << GetCurrDateTimeStr() << "index: " << Index << " DeckLinkInputDevice get video frame No input source " << hex << flags << " ------------ \n";
+ //return S_OK;
+ noInputSourceFlag = true;
+ }
+ bool restartStream = !noInputSourceFlag && PrevInputSignalAbsent;
+ if (restartStream)
+ {
+ DeckLinkInput->StopStreams();
+ DeckLinkInput->FlushStreams();
+ DeckLinkInput->StartStreams();
+ }
+ PrevInputSignalAbsent = noInputSourceFlag;
+
+ if (noInputSourceFlag)
+ return S_OK;
+
+ if (m_pushed)
+ {
+ auto cur_time = QDateTime::currentMSecsSinceEpoch();
+ Capture->AddFrame(frame, cur_time);
+
+ if (NDIOutput)
+ NDIOutput->AddFrame(frame);
+ }
+ m_pushed = !m_pushed;
+
+ uint64_t currTime, deltaTime;
+
+ m_fps++;
+ currTime = TimeMilliSecond();
+ deltaTime = currTime - m_lastRecvTS;
+ if (deltaTime >= 1000)
+ {
+ qDebug() << GetCurrDateTimeStr() << " decklink input fps " << m_fps << "\n";
+
+ m_fps = 0;
+ m_lastRecvTS = currTime;
+ }
}
if(audioPacket)
{
@@ -129,6 +169,8 @@ HRESULT DeckLinkInputDevice::VideoInputFormatChanged(BMDVideoInputFormatChangedE
BMDPixelFormat pixelFormat;
BMDDisplayMode displayMode = newDisplayMode->GetDisplayMode();
+ return S_OK;
+
// Unexpected callback when auto-detect mode not enabled
if (!bApplyDetectedInputMode)
return E_FAIL;
@@ -166,6 +208,7 @@ HRESULT DeckLinkInputDevice::VideoInputFormatChanged(BMDVideoInputFormatChangedE
DeckLinkInput->StopStreams();
// Set the video input mode
+ //pixelFormat = bmdFormat8BitYUV;
result = DeckLinkInput->EnableVideoInput(displayMode, pixelFormat, bmdVideoInputEnableFormatDetection);
if (result == S_OK)
@@ -233,7 +276,7 @@ bool DeckLinkInputDevice::StartCapture(BMDDisplayMode displayMode, IDeckLinkScre
DeckLinkInput->SetCallback(this);
// Set the video input mode
- if (DeckLinkInput->EnableVideoInput(displayMode, bmdFormat10BitYUV, videoInputFlags) != S_OK)
+ if (DeckLinkInput->EnableVideoInput(bmdMode4K2160p50, bmdFormat8BitYUV, bmdVideoInputFlagDefault) != S_OK)
return false;
if (DeckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, bmdAudioSampleType16bitInteger, AudioChannel) != S_OK)
diff --git a/MomentaMedia/src/BlackMagicDesign/DeckLinkInputPage.cpp b/MomentaMedia/src/BlackMagicDesign/DeckLinkInputPage.cpp
index 22a70c6..b0b5bbe 100644
--- a/MomentaMedia/src/BlackMagicDesign/DeckLinkInputPage.cpp
+++ b/MomentaMedia/src/BlackMagicDesign/DeckLinkInputPage.cpp
@@ -255,6 +255,7 @@ void DeckLinkInputPage::RequestedDeviceGranted(ComPtr<IDeckLink>& device)
//connect(SelectedDevice.Get(), SIGNAL(ArrivedFrame(ComPtr<IDeckLinkVideoInputFrame>)), Capture.get(), SLOT(AddFrame(ComPtr<IDeckLinkVideoInputFrame>)));
SelectedDevice.Get()->SetCapture(Capture);
+ SelectedDevice.Get()->SetNDIOutputThread(NDIOutput);
SelectedDeviceChanged();
StartCapture();
@@ -313,9 +314,12 @@ void DeckLinkInputPage::ObjectNameChanged(const QString& newName)
START_SLOT_TIME_COUNTER
NDINameLabel->setText(QString("NDIOutput") + newName.at(newName.size() - 1));
- NDIOutput = std::make_unique<NDIOutputThread>(NDINameLabel->text(), 1920, 1080);
+ NDIOutput = std::make_shared<NDIOutputThread>(NDINameLabel->text(), 1920, 1080);
- connect(Capture.get(), SIGNAL(PushFrame(std::shared_ptr<Image>)), NDIOutput.get(), SLOT(AddFrame(std::shared_ptr<Image>)));
+ //SelectedDevice.Get()->SetNDIOutputThread(NDIOutput);
+
+ //connect(Capture.get(), SIGNAL(PushFrame(std::shared_ptr<Image>)), NDIOutput.get(), SLOT(AddFrame(std::shared_ptr<Image>)));
+ //connect(SelectedDevice.Get(), SIGNAL(ArrivedFrame(ComPtr<IDeckLinkVideoInputFrame>)), NDIOutput.get(), SLOT(AddFrame(ComPtr<IDeckLinkVideoInputFrame>)));
END_SLOT_TIME_COUNTER
}
diff --git a/MomentaMedia/src/BlackMagicDesign/DeckLinkOutputDevice.cpp b/MomentaMedia/src/BlackMagicDesign/DeckLinkOutputDevice.cpp
index 895af2d..f3b3c71 100644
--- a/MomentaMedia/src/BlackMagicDesign/DeckLinkOutputDevice.cpp
+++ b/MomentaMedia/src/BlackMagicDesign/DeckLinkOutputDevice.cpp
@@ -24,7 +24,9 @@ DeckLinkOutputDevice::DeckLinkOutputDevice(ComPtr<IDeckLink>& decklink, int vide
startPlaybackTime(0),
scheduledFrameCompletedCallback(nullptr),
first_sleep(false),
- Index(index)
+ Index(index),
+ m_fps(0),
+ m_lastRecvTS(TimeMilliSecond())
{
// Check that device has an output interface, this will throw an error if using a capture-only device such as DeckLink Mini Recorder
if (!deckLinkOutput)
@@ -146,7 +148,7 @@ bool DeckLinkOutputDevice::StartPlayback(BMDDisplayMode displayMode, bool enable
#endif
#if USE_4K
- outputDisplayMode = bmdModeHD1080i50;
+ outputDisplayMode = bmdModeHD1080p25;
#endif
@@ -390,6 +392,24 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
qDebug() << "other error code." << "\n";
}
}
+ else
+ {
+ m_fps++;
+ }
+
+ uint64_t currTime, deltaTime;
+ int qsize;
+
+ qsize = outputVideoFrameQueue.Size();
+ currTime = TimeMilliSecond();
+ deltaTime = currTime - m_lastRecvTS;
+ if (deltaTime >= 1000)
+ {
+ qDebug() << GetCurrDateTimeStr() << " decklink output fps " << m_fps << ", qsize " << qsize << "\n";
+
+ m_fps = 0;
+ m_lastRecvTS = currTime;
+ }
//if(deckLinkOutput->DisplayVideoFrameSync(videoFrame.Get()))
//{
@@ -539,8 +559,8 @@ void DeckLinkOutputDevice::AddFrame(std::shared_ptr<Image> image)
if (!image->IsValid())
return;
- if (outputVideoFrameQueue.Size() >= 4)
- qDebug() << "DeckLinkOutputDevice::AddFrame video---qsize:" << "\t" << outputVideoFrameQueue.Size() << "\n";
+ //if (outputVideoFrameQueue.Size() >= 4)
+ // qDebug() << "DeckLinkOutputDevice::AddFrame video---qsize:" << "\t" << outputVideoFrameQueue.Size() << "\n";
if (outputVideoFrameQueue.Size() > audio_max_size)
{
@@ -552,8 +572,8 @@ void DeckLinkOutputDevice::AddFrame(std::shared_ptr<Image> image)
void DeckLinkOutputDevice::AddAudioFrame(std::shared_ptr<AudioPacket> audio_packet)
{
START_SLOT_TIME_COUNTER
- if (outputAudioFrameQueue.Size() >= 4)
- qDebug() << "index:" << Index << "DeckLinkOutputDevice::AddAudioFrame audio---qsize:" << "\t" << outputAudioFrameQueue.Size() << "\n";
+ //if (outputAudioFrameQueue.Size() >= 4)
+ //qDebug() << "index:" << Index << "DeckLinkOutputDevice::AddAudioFrame audio---qsize:" << "\t" << outputAudioFrameQueue.Size() << "\n";
if (outputAudioFrameQueue.Size() > audio_max_size)
{
diff --git a/MomentaMedia/src/BlackMagicDesign/DeckLinkOutputPage.cpp b/MomentaMedia/src/BlackMagicDesign/DeckLinkOutputPage.cpp
index 6f0eb82..fc7225a 100644
--- a/MomentaMedia/src/BlackMagicDesign/DeckLinkOutputPage.cpp
+++ b/MomentaMedia/src/BlackMagicDesign/DeckLinkOutputPage.cpp
@@ -266,6 +266,9 @@ void DeckLinkOutputPage::RequestedDeviceGranted(ComPtr<IDeckLink>& device)
SelectedDeviceChanged();
StartOutput();
+
+ SelectedDevice->StartPlayback(bmdModeHD1080p50, false, bmdFormat8BitBGRA, false, PreviewView->GetDelegate());
+
END_SLOT_TIME_COUNTER
}
diff --git a/MomentaMedia/src/BlackMagicDesign/DeckLinkOutputVideoFrame.cpp b/MomentaMedia/src/BlackMagicDesign/DeckLinkOutputVideoFrame.cpp
index d2a9f23..3d2b3a3 100644
--- a/MomentaMedia/src/BlackMagicDesign/DeckLinkOutputVideoFrame.cpp
+++ b/MomentaMedia/src/BlackMagicDesign/DeckLinkOutputVideoFrame.cpp
@@ -9,7 +9,7 @@
#include <cstdint>
-
+#if 0
DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame() : RefCount(1), width(-1), height(-1), rowBytes(-1), frameFlags(bmdFrameFlagDefault), pixelFormat(bmdFormat8BitBGRA)
{
@@ -35,22 +35,24 @@ DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame(Image& image)
memcpy(buffer.data(), image.GetBytes(), height * rowBytes);
}
+#endif
DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame(std::shared_ptr<Image> image)
: RefCount(1),
width(image->GetWidth()),
height(image->GetHegiht()),
frameFlags(image->GetFlags()),
- pixelFormat(image->GetPixelFormat())
+ pixelFormat(image->GetPixelFormat()),
+ m_img(image)
{
rowBytes = GetRowBytesFromPixelFormat(width, pixelFormat);
- buffer.resize(height * rowBytes);
+ //buffer.resize(height * rowBytes);
- memcpy(buffer.data(), image->GetBytes(), height * rowBytes);
+ //memcpy(buffer.data(), image->GetBytes(), height * rowBytes);
}
-
+#if 0
DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame(Image&& image)
: RefCount(1),
width(image.GetWidth()),
@@ -64,11 +66,12 @@ DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame(Image&& image)
memcpy(buffer.data(), image.GetBytes(), height * rowBytes);
}
+#endif
DeckLinkOutputVideoFrame::~DeckLinkOutputVideoFrame()
{
- buffer.clear();
+ //buffer.clear();
}
ULONG DeckLinkOutputVideoFrame::AddRef()
@@ -143,14 +146,6 @@ long DeckLinkOutputVideoFrame::GetRowBytes()
HRESULT DeckLinkOutputVideoFrame::GetBytes(void** buf)
{
- if(buffer.empty())
- {
- *buf = nullptr;
- return E_FAIL;
- }
- else
- {
- *buf = buffer.data();
- return S_OK;
- }
+ *buf = m_img->GetBytes();
+ return S_OK;
}
diff --git a/MomentaMedia/src/NDI/NDIOutputThread.cpp b/MomentaMedia/src/NDI/NDIOutputThread.cpp
index 3b21ea3..539256a 100644
--- a/MomentaMedia/src/NDI/NDIOutputThread.cpp
+++ b/MomentaMedia/src/NDI/NDIOutputThread.cpp
@@ -11,8 +11,12 @@
extern qint64 StartTimeStamp;
-NDIOutputThread::NDIOutputThread(const QString& Name, int w, int h) : NDISenderName(Name), width(w), height(h), isSending(false), Instance(nullptr)
+NDIOutputThread::NDIOutputThread(const QString& Name, int w, int h) : NDISenderName(Name), width(w), height(h), isSending(false), Instance(nullptr),
+ m_lastTS(TimeMilliSecond()),
+ m_fps(0),
+ m_seqNum(0)
{
+ m_scale = new VideoScale(3840, 2160, AV_PIX_FMT_UYVY422, 1920, 1080, AV_PIX_FMT_UYVY422);
}
NDIOutputThread::~NDIOutputThread()
@@ -20,6 +24,8 @@ NDIOutputThread::~NDIOutputThread()
free(Frame.p_data);
NDIlib_send_destroy(Instance);
+
+ delete m_scale;
}
void NDIOutputThread::SetNDISenderName(const QString& Name)
@@ -64,22 +70,67 @@ bool NDIOutputThread::Init()
// Provide a meta-data registration that allows people to know what we are. Note that this is optional.
// Note that it is possible for senders to also register their preferred video formats.
- NDIlib_metadata_frame_t NDI_connection_type;
- NDIlib_send_add_connection_metadata(Instance, &NDI_connection_type);
+ //NDIlib_metadata_frame_t NDI_connection_type;
+ //NDIlib_send_add_connection_metadata(Instance, &NDI_connection_type);
Frame.xres = width;
Frame.yres = height;
- Frame.FourCC = NDIlib_FourCC_type_BGRA;
- Frame.p_data = (uint8_t*)malloc(Frame.xres * Frame.yres * 4);
- Frame.line_stride_in_bytes = Frame.xres * 4;
- Frame.frame_rate_D = 1000;
- Frame.frame_rate_N = 25000;
+ Frame.FourCC = NDIlib_FourCC_type_UYVY;
+ Frame.line_stride_in_bytes = Frame.xres * 2;
+ //Frame.p_data = (uint8_t*)malloc(Frame.xres * Frame.yres * 2);
+ Frame.frame_rate_D = 1;
+ Frame.frame_rate_N = 25;
Frame.frame_format_type = NDIlib_frame_format_type_progressive;
Frame.picture_aspect_ratio = 16.0 / 9;
+ //Frame.timecode = NDIlib_send_timecode_synthesize;
+ //Frame.timestamp = 0;
+ Frame.p_metadata = nullptr;
return true;
}
+void NDIOutputThread::run()
+{
+ ComPtr<IDeckLinkVideoInputFrame> frame;
+ void* srcBuff;
+ uint8_t* dstBuff;
+ int dstBuffSize;
+ uint64_t currTime, deltaTime;
+
+ if (!Init())
+ return;
+
+ while (true)
+ {
+
+ if (taskQueue.WaitFor(frame))
+ {
+ frame->GetBytes(&srcBuff);
+ m_scale->scale((uint8_t*)srcBuff, 0, &dstBuff, &dstBuffSize);
+
+ Frame.p_data = dstBuff;
+ Frame.timestamp = ++m_seqNum;
+ NDIlib_send_send_video_v2(Instance, &Frame);
+
+ av_free(dstBuff);
+
+ taskQueue.Pop(frame);
+ }
+
+ m_fps++;
+ currTime = TimeMilliSecond();
+ deltaTime = currTime - m_lastTS;
+ if (deltaTime >= 1000)
+ {
+ qDebug() << GetCurrDateTimeStr() << " NDI ouptut fps " << m_fps << ", qsize " << taskQueue.Size() << "\n";
+
+ m_fps = 0;
+ m_lastTS = currTime;
+ }
+ }
+}
+
+#if 0
void NDIOutputThread::run()
{
if (!Init())
@@ -112,17 +163,19 @@ void NDIOutputThread::run()
}
}
}
+#endif
void NDIOutputThread::Clear()
{
taskQueue.Reset();
}
-void NDIOutputThread::AddFrame(std::shared_ptr<Image> frame)
+void NDIOutputThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> frame)
{
START_SLOT_TIME_COUNTER
- if (!frame->IsValid())
- return;
+ //if (!frame->IsValid())
+ // return;
+ //qDebug() << "NDIOutputThread::AddFrame\n";
taskQueue.Push(frame);
END_SLOT_TIME_COUNTER
diff --git a/MomentaMedia/src/Threads/CaptureThread.cpp b/MomentaMedia/src/Threads/CaptureThread.cpp
index 4a065e3..31cfadd 100644
--- a/MomentaMedia/src/Threads/CaptureThread.cpp
+++ b/MomentaMedia/src/Threads/CaptureThread.cpp
@@ -22,15 +22,18 @@ int CaptureThread::s_count = 0;
CaptureThread::CaptureThread()
: recvFrames(0),
idx(s_count++),
- recvStartTime(QDateTime::currentMSecsSinceEpoch())
+ recvStartTime(QDateTime::currentMSecsSinceEpoch()),
+ m_fps(0),
+ m_lastRecvTS(TimeMilliSecond())
//taskQueue(std::string("task")+ std::to_string(idx))
{
//idx = s_count++;
+ m_scale = new VideoScale(3840, 2160, AV_PIX_FMT_UYVY422, 3840, 2160, AV_PIX_FMT_BGRA);
}
CaptureThread::~CaptureThread()
{
-
+ delete m_scale;
}
void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame, const qint64& timestamp)
@@ -111,6 +114,13 @@ void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame, const
void CaptureThread::run()
{
+ void* srcBuff;
+ uint8_t* dstBuff;
+ int dstBuffSize;
+
+ uint64_t currTime, deltaTime;
+ int qsize;
+
while(true)
{
@@ -140,16 +150,35 @@ void CaptureThread::run()
cur_time = video_data.timestamp;
sequence = video_data.sequenceNum;
taskVideoQueue.pop();
+ qsize = taskVideoQueue.size();
}
if (videoFrame.Get() != nullptr)
{
//auto cur_time = QDateTime::currentMSecsSinceEpoch();
- //qDebug() << "input frame cur time:" << cur_time << "\n";
- std::shared_ptr<Image> image = std::make_shared<Image>(videoFrame,cur_time, sequence);
+ //qDebug() << "input frame cur time:" << cur_time << "\n"
+
+ videoFrame->GetBytes(&srcBuff);
+ m_scale->scale((uint8_t*)srcBuff, 0, &dstBuff, &dstBuffSize);
+
+ std::shared_ptr<Image> image = std::make_shared<Image>(
+ std::make_shared<AVBuff>(dstBuff), 3840, 2610, bmdFormat8BitBGRA, cur_time, sequence);
+
emit PushFrame(image);
+
+ m_fps++;
}
+
+ currTime = TimeMilliSecond();
+ deltaTime = currTime - m_lastRecvTS;
+ if (deltaTime >= 1000)
+ {
+ qDebug() << GetCurrDateTimeStr() << " capture scale fps " << m_fps << ", qsize " << qsize << "\n";
+
+ m_fps = 0;
+ m_lastRecvTS = currTime;
+ }
/*while (taskQueue.size() > 30)
{
//taskQueue.Pop(videoFrame);
diff --git a/MomentaMedia/src/Threads/ProcessThread.cpp b/MomentaMedia/src/Threads/ProcessThread.cpp
index b36d9c1..3863d82 100644
--- a/MomentaMedia/src/Threads/ProcessThread.cpp
+++ b/MomentaMedia/src/Threads/ProcessThread.cpp
@@ -19,7 +19,9 @@ ProcessThread::ProcessThread()
sendStartTime(QDateTime::currentMSecsSinceEpoch()),
recvROIs(0),
continuousLostNums(0),
- recvROIStartTime(QDateTime::currentMSecsSinceEpoch())
+ recvROIStartTime(QDateTime::currentMSecsSinceEpoch()),
+ m_fps(0),
+ m_lastRecvTS(TimeMilliSecond())
{
idx = s_count++;
int key = idx + 1;
@@ -183,7 +185,7 @@ void ProcessThread::cutRunFront()
auto itor = cutRuleMap.find(sequence);
if (itor != cutRuleMap.end())
{
- qDebug() << "idx:" << idx << "find rule,sequence: " << sequence << "\n";
+ //qDebug() << "idx:" << idx << "find rule,sequence: " << sequence << "\n";
lastReceiveMessage = itor->second;
WorkCutImage(image, lastReceiveMessage);
taskImageQueue.Pop();
@@ -192,7 +194,7 @@ void ProcessThread::cutRunFront()
{
if (sequence < cutRuleMap.begin()->first)
{
- //qDebug() << "idx:" << idx << " sequence: " << sequence << " too small " << "\n";
+ qDebug() << "idx:" << idx << " sequence: " << sequence << " too small " << "\n";
lastReceiveMessage = cutRuleMap.begin()->second;
WorkCutImage(image, lastReceiveMessage);
taskImageQueue.Pop();
@@ -244,7 +246,7 @@ void ProcessThread::cutRunFront()
}
else
{
- //qDebug() << "idx:" << idx << " sequence: " << sequence << " wait cut rule..... " << "\n";
+ qDebug() << "idx:" << idx << " sequence: " << sequence << " use last cut rule..... " << "\n";
//if (!taskROIQueue.WaitUntil(200))
//{
// //qDebug() << "idx:" << idx << " wait 200ms ,and not recv data!!!!!!!!!!!!" << endl;
@@ -526,28 +528,43 @@ void ProcessThread::WorkCutImage(std::shared_ptr<Image>& pImage, RoiMessage& roi
//qDebug() << "ROI- " << idx << " " << roi.X() << " " << roi.Y() << " " << roi.Width() << " " << roi.Height() << "\n";
#if USE_4K
+
size_t size = roi.Width() * roi.Height() << 2 << 2;
- uint8_t * buff1 = new uint8_t[size];
+ uint8_t* buff1 = new uint8_t[size];
libyuv::ConvertToARGB(pImage->GetBytes(), (3840 * 2160 << 2), buff1, (roi.Height() << 2 << 1),
- roi.X() << 1, roi.Y() << 1, 3840, 2160, roi.Width() << 1, roi.Height() << 1,
- libyuv::kRotate90, libyuv::FOURCC_ARGB);
-
- uint8_t * buff2 = new uint8_t[1440 * 1080 << 2];
+ roi.X() << 1, roi.Y() << 1, 3840, 2160, roi.Width() << 1, roi.Height() << 1,
+ libyuv::kRotate90, libyuv::FOURCC_ARGB);
+
+ uint8_t* buff2 = new uint8_t[1440 * 1080 << 2];
libyuv::ARGBScale(buff1, (roi.Height() << 2 << 1), roi.Height() << 1, roi.Width() << 1,
buff2, 1440 << 2, 1440, 1080, libyuv::FilterMode::kFilterNone);
-
- uint8_t * buff3 = new uint8_t[1920 * 1080 << 2];
- memset(buff3, 0, (1920 * 1080 << 2));
+
+ uint8_t* buff3 = (uint8_t*)av_malloc(1920 * 1080 << 2);
libyuv::ARGBCopy(buff2, 1440 << 2, (buff3 + BlackBottomHeight * 4), 1920 << 2, 1440, 1080);
-
- cv::Mat bgra = cv::Mat(1080, 1920, CV_8UC4, buff3);
- pImage->SetMat(bgra);
-
+
delete[] buff1;
delete[] buff2;
- delete[] buff3;
+
+ std::shared_ptr<Image> img = std::make_shared<Image>(
+ std::make_shared<AVBuff>(buff3), 1920, 1080, bmdFormat8BitBGRA,
+ pImage->getInputFrameCurTimeStamp(), pImage->getInputFrameSequence());
- emit PushFrame(pImage);
+ emit PushFrame(img);
+
+ uint64_t currTime, deltaTime;
+ int qsize;
+
+ m_fps++;
+ qsize = taskImageQueue.Size();
+ currTime = TimeMilliSecond();
+ deltaTime = currTime - m_lastRecvTS;
+ if (deltaTime >= 1000)
+ {
+ qDebug() << GetCurrDateTimeStr() << " crop fps " << m_fps << ", qsize " << qsize << "\n";
+
+ m_fps = 0;
+ m_lastRecvTS = currTime;
+ }
#elif USE_1080P
//qDebug() << "wgt------------idx:" << idx << ",cut image of timestamp : " << pImage->getInputFrameCurTimeStamp() << ", cur image of x : " << roi.X() << "\n";
@@ -609,6 +626,7 @@ void ProcessThread::WorkCutImage(std::shared_ptr<Image>& pImage, RoiMessage& roi
void ProcessThread::WorkCutImage720p(std::shared_ptr<Image>& pImage, RoiMessage& roi)
{
+#if 0
size_t size = roi.Width() * roi.Height() << 2;
uint8_t* buff1 = new uint8_t[size];
libyuv::ConvertToARGB(pImage->GetBytes(), (1920 * 1080 << 2), buff1, (roi.Height() << 2),
@@ -631,6 +649,7 @@ void ProcessThread::WorkCutImage720p(std::shared_ptr<Image>& pImage, RoiMessage&
delete[] buff3;
emit PushFrame(pImage);
+#endif
}
void ProcessThread::Clear()
diff --git a/MomentaMedia/src/Utils/Image.cpp b/MomentaMedia/src/Utils/Image.cpp
index 6e671f9..e2b006c 100644
--- a/MomentaMedia/src/Utils/Image.cpp
+++ b/MomentaMedia/src/Utils/Image.cpp
@@ -3,11 +3,7 @@
#include "opencv2/core.hpp"
#include "Utils/Common.h"
-Image::Image() : mat()
-{
-
-}
-
+/*
Image::Image(IDeckLinkVideoInputFrame* videoFrame)
{
#if USE_4K
@@ -60,39 +56,35 @@ Image::Image(ComPtr<IDeckLinkVideoInputFrame> videoFrame)
inVideoFrame = MakeComPtr<DeckLinkInputVideoFrame>(videoFrame.Get());
ConvertDeckLinkVideoFrame2Mat(inVideoFrame, mat);
-}
+}*/
-Image::Image(ComPtr<IDeckLinkVideoInputFrame> videoFrame, const qint64& curtimestamp, const qint64& sequence):inputFrameCurTimeStamp(curtimestamp),sequenceNum(sequence)
+Image::Image(std::shared_ptr<AVBuff> frame, int w, int h, BMDPixelFormat pixFmt, const qint64& curtimestamp, const qint64& sequence):
+ m_frame(frame),
+ m_width(w),
+ m_height(h),
+ m_pixFmt(pixFmt),
+ inputFrameCurTimeStamp(curtimestamp),
+ sequenceNum(sequence)
{
-#if USE_4K
- if (videoFrame->GetWidth() != 3840 || videoFrame->GetHeight() != 2160)
- {
- return;
- }
-#else
- if (videoFrame->GetWidth() != 1920 || videoFrame->GetHeight() != 1080)
- {
- return;
- }
-#endif
- inVideoFrame = MakeComPtr<DeckLinkInputVideoFrame>(videoFrame.Get());
- ConvertDeckLinkVideoFrame2Mat(inVideoFrame, mat);
}
+/*
Image::Image(const Image& other)
{
- mat = other.mat;
+
}
+
Image::Image(Image&& other)
{
mat = other.mat;
}
+
Image& Image::operator=(const Image& other)
{
- mat = other.mat;
+
return *this;
}
@@ -100,7 +92,7 @@ Image& Image::operator=(Image&& other)
{
mat = other.mat;
return *this;
-}
+}*/
Image::~Image()
{
@@ -109,71 +101,11 @@ Image::~Image()
bool Image::IsValid() const
{
- return !mat.empty();
+ return m_frame ? true : false;
}
uint8_t* Image::GetBytes() const
{
- if (IsValid())
- return mat.data;
- else
- return nullptr;
-}
-
-cv::Mat Image::GetMat()
-{
- return mat;
-}
-
-void Image::SetMat(cv::Mat& inMat)
-{
- mat = inMat.clone();
-}
-
-
-void Image::GetMatByRoi(cv::Rect roi, cv::Mat& outMat)
-{
- int w = GetWidth();
- int h = GetHegiht();
- if (roi.x + roi.width > w) roi.x = w - roi.width;
- if (roi.x < 0) roi.x = 0;
- if (roi.x + roi.width > w) roi.width = w - roi.x;
-
- if (roi.y + roi.height > h) roi.y = h - roi.height;
- if (roi.y < 0) roi.y = 0;
- if (roi.y + roi.height > h) roi.height = h - roi.y;
-
- outMat = mat(roi);
+ return m_frame->m_data;
}
-
-
-HRESULT ConvertDeckLinkVideoFrame2Mat(ComPtr<DeckLinkInputVideoFrame> videoFrame, cv::Mat& imageFrame)
-{
- static QMutex mtx;
-
- if(videoFrame.Get() != nullptr)
- {
- void* buffer;
- if (FAILED(videoFrame->GetBytes(&buffer)))
- return S_FALSE;
- cv::Mat bgra = cv::Mat(videoFrame->GetHeight(), videoFrame->GetWidth(), CV_8UC4, buffer);
- bgra.copyTo(imageFrame);
-
-#if 0
-#include <stdio.h>
- static bool used = false;
- mtx.lock();
- if (!used) {
- used = true;
- FILE* fp = fopen("convert.bgra", "wb");
- fwrite(bgra.data, 1, (1920 * 1080 * 4), fp);
- fclose(fp);
- }
- mtx.unlock();
-#endif
-
- return S_OK;
- }
- return S_FALSE;
-}
\ No newline at end of file
优化点：
1、从板卡接收8BitYUV,降低处理数据量级
2、NDI直接输出YUV,省去YUV到BGRA的转换
3、使用单独线程缩放4K到1080
4、使用FFmpeg代替SDI缩放接口
5、移除Mat,优化Image图像数据存储
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment