Commit 783454f0 by 王国涛-悦动

pull

parent eccdcbf3
#include "Threads/CaptureThread.h"
#include "Threads/CaptureThread.h"
#include "Utils/Common.h"
// Returns a monotonic timestamp in milliseconds derived from the Windows
// high-resolution performance counter (NOT wall-clock time; suitable only
// for measuring intervals within this process).
static int64_t GetCurrTimeMS()
{
    LARGE_INTEGER counter;
    LARGE_INTEGER frequency;
    ::QueryPerformanceFrequency(&frequency);
    ::QueryPerformanceCounter(&counter);
    // counter/frequency is seconds; multiply first so millisecond precision survives
    // the integer division.
    return counter.QuadPart * 1000 / frequency.QuadPart;
}
// Running count of CaptureThread instances; used to assign each a log index.
int CaptureThread::s_count = 0;
// Constructs a capture worker: records its creation index for logging and
// allocates the UYVY422 -> BGRA converter at the fixed 4K capture resolution.
CaptureThread::CaptureThread()
: recvFrames(0),
idx(s_count++),
recvStartTime(QDateTime::currentMSecsSinceEpoch()),
m_fps(0),
m_lastRecvTS(TimeMilliSecond())
//taskQueue(std::string("task")+ std::to_string(idx))
{
//idx = s_count++;
// Converter is configured for 3840x2160 in and out; AddFrame() drops frames
// of any other size, so run() can assume this geometry.
m_scale = new VideoScale(3840, 2160, AV_PIX_FMT_UYVY422, 3840, 2160, AV_PIX_FMT_BGRA);
}
// Releases the scaler allocated in the constructor.
// NOTE(review): run() loops forever and also uses m_scale; this destructor does
// not stop/join the thread first, so it appears to rely on the thread being
// terminated elsewhere before destruction — confirm shutdown order at call sites.
CaptureThread::~CaptureThread()
{
delete m_scale;
}
// Qt slot: receives a captured frame (from DeckLinkInputDevice) with its
// arrival timestamp, validates its dimensions, stamps it with an increasing
// sequence number, and queues it for the worker loop in run().
// Frames of unexpected size (or null frames) are silently dropped.
void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame, const qint64& timestamp)
{
    START_SLOT_TIME_COUNTER
    if (videoFrame == nullptr)
        return;
    if (videoFrame->GetWidth() <= 0 || videoFrame->GetHeight() <= 0)
        return;
    // Only the configured capture resolution is accepted; the scaler in run()
    // assumes exactly this geometry.
#if USE_4K
    if (videoFrame->GetWidth() != 3840 || videoFrame->GetHeight() != 2160)
        return;
#else
    if (videoFrame->GetWidth() != 1920 || videoFrame->GetHeight() != 1080)
        return;
#endif
    {
        std::unique_lock<std::mutex> ulock(mutex);
        sequenceNum++;
        videoFrameData video_data = { videoFrame, timestamp, sequenceNum };
        taskVideoQueue.push(video_data);
        // BUGFIX: the backlog warning previously checked the legacy `taskQueue`,
        // which nothing pushes to, so it could never fire. Check the queue that
        // is actually in use (now also done under the lock).
        if (taskVideoQueue.size() >= 4)
            qDebug() << "CaptureThread::AddFrame::qsize::" << idx << "\t" << taskVideoQueue.size() << "\n";
        // Wake run(), which waits on this condition variable.
        cv.notify_all();
    }
    END_SLOT_TIME_COUNTER
}
// QThread entry point. Worker loop: waits for frames queued by AddFrame(),
// converts each UYVY422 frame to BGRA via m_scale, wraps the result in an
// Image and emits PushFrame. Logs the achieved scaling fps (and queue depth)
// once per second. Never returns.
void CaptureThread::run()
{
    void* srcBuff;
    uint8_t* dstBuff;
    int dstBuffSize;
    uint64_t currTime, deltaTime;
    int qsize = 0;  // initialized so the first log line can never read garbage
    while (true)
    {
        START_WAIT_TIME_COUNTER
        ComPtr<IDeckLinkVideoInputFrame> videoFrame;
        qint64 cur_time = 0;
        qint64 sequence = 0;
        {
            // Block until AddFrame() has queued at least one frame.
            std::unique_lock<std::mutex> ulock(mutex);
            while (taskVideoQueue.empty()) {
                cv.wait(ulock);
            }
            auto& video_data = taskVideoQueue.front();
            videoFrame = video_data.video_frame;
            cur_time = video_data.timestamp;
            sequence = video_data.sequenceNum;
            taskVideoQueue.pop();
            qsize = taskVideoQueue.size();
        }
        if (videoFrame.Get() != nullptr)
        {
            videoFrame->GetBytes(&srcBuff);
            // UYVY422 -> BGRA; dstBuff ownership passes to the AVBuff below.
            m_scale->scale((uint8_t*)srcBuff, 0, &dstBuff, &dstBuffSize);
            // BUGFIX: height was mistyped as 2610; the scaler and the input
            // filter are both configured for 3840x2160.
            std::shared_ptr<Image> image = std::make_shared<Image>(
                std::make_shared<AVBuff>(dstBuff), 3840, 2160, bmdFormat8BitBGRA, cur_time, sequence);
            emit PushFrame(image);
            m_fps++;
        }
        // Once per second, report how many frames were scaled and the backlog.
        currTime = TimeMilliSecond();
        deltaTime = currTime - m_lastRecvTS;
        if (deltaTime >= 1000)
        {
            qDebug() << GetCurrDateTimeStr() << " capture scale fps " << m_fps << ", qsize " << qsize << "\n";
            m_fps = 0;
            m_lastRecvTS = currTime;
        }
    }
}
\ No newline at end of file
#pragma once
#pragma once
#include <QThread>
#include <QMutex>
#include "Utils/Image.h"
#include "Utils/SampleQueue.h"
#include "Utils/VideoScale.h"
#include <condition_variable>
#include <memory>
#include <mutex>
#include <queue>
// Worker thread that buffers DeckLink capture frames and converts them
// (UYVY422 -> BGRA via VideoScale) before publishing them as Image objects.
class CaptureThread : public QThread
{
Q_OBJECT
public:
CaptureThread();
~CaptureThread();
public slots:
// Queued from the capture callback; validates and enqueues a frame for conversion.
void AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame,const qint64& timestamp);
signals:
// Emitted once per converted (BGRA) frame.
void PushFrame(std::shared_ptr<Image> image);
private:
// A captured frame together with its arrival timestamp and running sequence number.
typedef struct videoFrameData
{
ComPtr<IDeckLinkVideoInputFrame> video_frame;
qint64 timestamp;
qint64 sequenceNum = 0;
}videoFrameData;
static int s_count; // instances created so far; assigns idx
int idx; // per-instance index used in log output
//SampleQueue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue;
// QThread entry: drains taskVideoQueue, scales, emits PushFrame.
void run() override;
int recvFrames; // legacy fps counter (only referenced by commented-out code)
int64_t recvStartTime; // legacy fps window start (ms since epoch)
std::queue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue; // legacy queue; no longer filled
std::queue<videoFrameData> taskVideoQueue; // frames awaiting conversion; guarded by mutex
std::condition_variable cv; // signals run() that taskVideoQueue is non-empty
std::mutex mutex; // protects taskVideoQueue and sequenceNum
qint64 sequenceNum = 0; // incremented per accepted frame
VideoScale* m_scale; // UYVY422 -> BGRA converter (owned raw pointer)
int m_fps; // frames converted in the current 1-second window
uint64_t m_lastRecvTS; // start of the current fps window (ms)
};
\ No newline at end of file
#pragma once
#pragma once
#include "DeckLinkAPI.h"
#include <QDateTime.h>
#include <sys/timeb.h>
#define USE_4K 1
#define USE_1080P 1
long GetRowBytesFromPixelFormat(long width, BMDPixelFormat pixelFormat);
// Current local date-time formatted as "yyyy-MM-dd HH:mm:ss", for log prefixes.
static QString GetCurrDateTimeStr()
{
    const QString fmt = "yyyy-MM-dd HH:mm:ss";
    return QDateTime::currentDateTime().toString(fmt);
}
// Wall-clock time in milliseconds since the Unix epoch.
// The time_t is widened to uint64_t BEFORE the multiply so the expression
// cannot overflow on platforms where time_t (timeb::time) is 32-bit.
static uint64_t TimeMilliSecond()
{
	timeb now;
	ftime(&now);
	return static_cast<uint64_t>(now.time) * 1000 + now.millitm;
}
\ No newline at end of file
#include <QCoreApplication>
#include <QCoreApplication>
#include <QMessageBox>
#include <QTextStream>
#include "Utils/Platform.h"
#include "BlackMagicDesign/DeckLinkInputDevice.h"
#include "Utils/Common.h"
extern int AudioChannel;
// Wraps one DeckLink capture device.
// `parent` receives custom Qt events on capture errors / format changes;
// `index` identifies this device in log output. The input and configuration
// COM interfaces are acquired here; their validity is checked in Initialize().
DeckLinkInputDevice::DeckLinkInputDevice(QObject* parent, ComPtr<IDeckLink>& device, int index)
: RefCount(1),
Owner(parent),
DeckLink(device),
DeckLinkInput(IID_IDeckLinkInput, device),
DeckLinkConfig(IID_IDeckLinkConfiguration, device),
bSupportsFormatDetection(false),
bCurrentlyCapturing(false),
bApplyDetectedInputMode(false),
bLastValidFrameStatus(false),
SupportedInputConnections(bmdVideoConnectionUnspecified),
SelectedInputConnection(bmdVideoConnectionUnspecified),
Index(index),
m_pushed(true),
PrevInputSignalAbsent(false),
m_fps(0),
m_lastRecvTS(TimeMilliSecond())
{
//thd = std::thread(&DeckLinkInputDevice::ForwardThread, this);
}
// Stops any in-progress capture (detaching the SDK callbacks) before the
// object goes away.
DeckLinkInputDevice::~DeckLinkInputDevice()
{
if (bCurrentlyCapturing)
StopCapture();
}
// IUnknown methods
// IUnknown::QueryInterface — exposes IUnknown and IDeckLinkInputCallback.
// Any other riid yields E_NOINTERFACE with *ppv cleared.
HRESULT DeckLinkInputDevice::QueryInterface(REFIID riid, LPVOID* ppv)
{
    if (ppv == nullptr)
        return E_INVALIDARG;

    if (riid == IID_IUnknown)
    {
        *ppv = this;
        AddRef();
        return S_OK;
    }
    if (riid == IID_IDeckLinkInputCallback)
    {
        *ppv = (IDeckLinkInputCallback*)this;
        AddRef();
        return S_OK;
    }

    *ppv = nullptr;
    return E_NOINTERFACE;
}
// IUnknown::AddRef — atomically increments and returns the new count.
ULONG DeckLinkInputDevice::AddRef()
{
    return RefCount.fetch_add(1) + 1;
}
// IUnknown::Release — atomically decrements the count and self-deletes at zero.
ULONG DeckLinkInputDevice::Release()
{
    const ULONG remaining = RefCount.fetch_sub(1) - 1;
    if (remaining == 0)
        delete this;
    return remaining;
}
// IDeckLinkInputCallback methods
// IDeckLinkInputCallback: invoked by the DeckLink driver for every captured
// video frame / audio packet, on the SDK's capture thread.
// - Restarts the input streams once when a valid signal returns after absence.
// - Forwards every OTHER video frame (m_pushed toggles each call, halving the
//   forwarded rate) to the CaptureThread and, if present, the NDI output.
// - Always forwards audio packets via PushAudioFrame.
// - Logs the incoming frame rate once per second.
HRESULT DeckLinkInputDevice::VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioPacket)
{
// Since this application only previews, everything is driven from IDeckLinkScreenPreviewCallback::DrawFrame
ComPtr<IDeckLinkVideoInputFrame> frame = ComPtr<IDeckLinkVideoInputFrame>(videoFrame);
//emit ArrivedFrame(frame);
if (videoFrame && Capture) {
unsigned flags = videoFrame->GetFlags();
bool noInputSourceFlag = false;
if (flags & bmdFrameHasNoInputSource) {
//qDebug() << GetCurrDateTimeStr() << "index: " << Index << " DeckLinkInputDevice get video frame No input source " << hex << flags << " ------------ \n";
//return S_OK;
noInputSourceFlag = true;
}
// Signal just came back after being absent: flush and restart the streams so
// capture resynchronizes with the new signal.
bool restartStream = !noInputSourceFlag && PrevInputSignalAbsent;
if (restartStream)
{
DeckLinkInput->StopStreams();
DeckLinkInput->FlushStreams();
DeckLinkInput->StartStreams();
}
PrevInputSignalAbsent = noInputSourceFlag;
// Frames carrying no input signal are dropped.
if (noInputSourceFlag)
return S_OK;
// m_pushed alternates true/false, so only every second frame is forwarded.
if (m_pushed)
{
auto cur_time = QDateTime::currentMSecsSinceEpoch();
Capture->AddFrame(frame, cur_time);
if (NDIOutput)
NDIOutput->AddFrame(frame);
}
m_pushed = !m_pushed;
// Once-per-second incoming fps log (counts all frames, forwarded or not).
uint64_t currTime, deltaTime;
m_fps++;
currTime = TimeMilliSecond();
deltaTime = currTime - m_lastRecvTS;
if (deltaTime >= 1000)
{
qDebug() << GetCurrDateTimeStr() << " decklink input fps " << m_fps << "\n";
m_fps = 0;
m_lastRecvTS = currTime;
}
}
if(audioPacket)
{
//qDebug() << "DeckLinkInputDevice get audio packet--------------" << "\n";
auto cur_time = QDateTime::currentMSecsSinceEpoch();
std::shared_ptr<AudioPacket> audio_ptr = std::make_shared<AudioPacket>(audioPacket, cur_time, AudioChannel);
emit PushAudioFrame(audio_ptr);
}
/*std::unique_lock<std::mutex> ulock(mutex);
taskQueue.push(frame);
cv.notify_all();*/
return S_OK;
}
/*void DeckLinkInputDevice::ForwardThread()
{
ComPtr<IDeckLinkVideoInputFrame> frame;
while (true) {
{
std::unique_lock<std::mutex> ulock(mutex);
while (taskQueue.empty()) {
cv.wait(ulock);
}
frame = taskQueue.front();
taskQueue.pop();
}
if (Capture) {
Capture->AddFrame(frame);
}
}
}*/
// IDeckLinkInputCallback: called by the driver when automatic input-format
// detection notices a new display mode or colorspace.
// NOTE(review): the unconditional `return S_OK;` below makes everything after
// it unreachable — format-change handling is effectively disabled. This looks
// like a deliberate temporary disable (the app hardcodes 4K in StartCapture),
// but confirm before removing the dead code.
HRESULT DeckLinkInputDevice::VideoInputFormatChanged(BMDVideoInputFormatChangedEvents notificationEvents, IDeckLinkDisplayMode* newDisplayMode, BMDDetectedVideoInputFormatFlags detectedSignalFlags)
{
HRESULT result;
BMDPixelFormat pixelFormat;
BMDDisplayMode displayMode = newDisplayMode->GetDisplayMode();
// Early exit: the remainder of this handler is currently dead code.
return S_OK;
// Unexpected callback when auto-detect mode not enabled
if (!bApplyDetectedInputMode)
return E_FAIL;
// Map the detected colorspace + bit depth to a pixel format.
if (detectedSignalFlags & bmdDetectedVideoInputRGB444)
{
if (detectedSignalFlags & bmdDetectedVideoInput8BitDepth)
pixelFormat = bmdFormat8BitARGB;
else if (detectedSignalFlags & bmdDetectedVideoInput10BitDepth)
pixelFormat = bmdFormat10BitRGB;
else if (detectedSignalFlags & bmdDetectedVideoInput12BitDepth)
pixelFormat = bmdFormat12BitRGB;
else
// Invalid color depth for RGB
return E_INVALIDARG;
}
else if (detectedSignalFlags & bmdDetectedVideoInputYCbCr422)
{
if (detectedSignalFlags & bmdDetectedVideoInput8BitDepth)
pixelFormat = bmdFormat8BitYUV;
else if (detectedSignalFlags & bmdDetectedVideoInput10BitDepth)
pixelFormat = bmdFormat10BitYUV;
else
// Invalid color depth for YUV
return E_INVALIDARG;
}
else
// Unexpected detected video input format flags
return E_INVALIDARG;
// Restart streams if either display mode or colorspace has changed
if (notificationEvents & (bmdVideoInputDisplayModeChanged | bmdVideoInputColorspaceChanged))
{
// Stop the capture
DeckLinkInput->StopStreams();
// Set the video input mode
//pixelFormat = bmdFormat8BitYUV;
result = DeckLinkInput->EnableVideoInput(displayMode, pixelFormat, bmdVideoInputEnableFormatDetection);
if (result == S_OK)
// Start the capture
result = DeckLinkInput->StartStreams();
if (result != S_OK)
// Let owner know we couldn`t restart capture with detected input video mode
QCoreApplication::postEvent(Owner, new QEvent(kErrorRestartingCaptureEvent));
else
QCoreApplication::postEvent(Owner, new DeckLinkInputFormatChangedEvent(displayMode));
}
return S_OK;
}
// Other methods
// Verifies the COM interfaces acquired in the constructor and queries device
// capabilities (format detection support, available input connections).
// Returns false when the device cannot be used for capture.
bool DeckLinkInputDevice::Initialize()
{
    ComPtr<IDeckLinkProfileAttributes> deckLinkAttributes(IID_IDeckLinkProfileAttributes, DeckLink);

    // Devices without an input interface (e.g. DeckLink Mini Monitor) are rejected.
    if (!DeckLinkInput)
        return false;
    // The configuration interface is held until destruction so the selected
    // input connector setting stays in effect.
    if (!DeckLinkConfig)
        return false;
    if (!deckLinkAttributes)
        return false;

    // Does the hardware support automatic input-format detection?
    dlbool_t attributeFlag;
    bSupportsFormatDetection =
        (deckLinkAttributes->GetFlag(BMDDeckLinkSupportsInputFormatDetection, &attributeFlag) == S_OK)
            ? static_cast<bool>(attributeFlag)
            : false;

    // Which physical input connections does the device expose?
    if (deckLinkAttributes->GetInt(BMDDeckLinkVideoInputConnections, &SupportedInputConnections) != S_OK)
        SupportedInputConnections = 0;

    return true;
}
// Enables video+audio input and starts the capture streams.
// Returns false if any SDK call fails.
// NOTE(review): `videoInputFlags` is built (including format detection) but
// never passed — EnableVideoInput is called with bmdVideoInputFlagDefault,
// so auto format detection is never actually enabled. Likewise the
// `displayMode` parameter is ignored in favor of a hardcoded bmdMode4K2160p50.
// Both look intentional for the current fixed-4K deployment, but confirm.
bool DeckLinkInputDevice::StartCapture(BMDDisplayMode displayMode, IDeckLinkScreenPreviewCallback* screenPreviewCallback, bool applyDetectedInputMode)
{
BMDVideoInputFlags videoInputFlags = bmdVideoInputFlagDefault;
bApplyDetectedInputMode = applyDetectedInputMode;
// Enable input video mode detection if the device supports it
if (bSupportsFormatDetection && bApplyDetectedInputMode)
videoInputFlags |= bmdVideoInputEnableFormatDetection;
// Set the screen preview
DeckLinkInput->SetScreenPreviewCallback(screenPreviewCallback);
// Set capture callback
DeckLinkInput->SetCallback(this);
// Set the video input mode
if (DeckLinkInput->EnableVideoInput(bmdMode4K2160p50, bmdFormat8BitYUV, bmdVideoInputFlagDefault) != S_OK)
return false;
if (DeckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, bmdAudioSampleType16bitInteger, AudioChannel) != S_OK)
return false;
// Set the capture
if (DeckLinkInput->StartStreams() != S_OK)
return false;
bCurrentlyCapturing = true;
return true;
}
// Halts capture and detaches both callbacks so no further frames arrive.
// Safe to call when DeckLinkInput is null; always clears the capturing flag.
void DeckLinkInputDevice::StopCapture()
{
if (DeckLinkInput)
{
// Stop the capture
DeckLinkInput->StopStreams();
DeckLinkInput->DisableVideoInput();
// Delete the callbacks
DeckLinkInput->SetScreenPreviewCallback(nullptr);
DeckLinkInput->SetCallback(nullptr);
}
bCurrentlyCapturing = false;
}
// Invokes `func` once for every display mode the device supports on the
// currently selected input connection. Modes the hardware rejects (or that
// would need conversion) are skipped.
void DeckLinkInputDevice::QuerySupportedVideoModes(DeckLinkDisplayModeQueryFunc func)
{
ComPtr<IDeckLinkDisplayModeIterator> displayModeIterator;
ComPtr<IDeckLinkDisplayMode> displayMode;
if (DeckLinkInput->GetDisplayModeIterator(displayModeIterator.ReleaseAndGetAddressOf()) != S_OK)
return;
// Iterate through each supported display mode for the input connection
while(displayModeIterator->Next(displayMode.ReleaseAndGetAddressOf()) == S_OK)
{
dlbool_t supported = false;
BMDDisplayMode mode = displayMode->GetDisplayMode();
if ((DeckLinkInput->DoesSupportVideoMode(SelectedInputConnection, mode, bmdFormatUnspecified, bmdNoVideoInputConversion, bmdSupportedVideoModeDefault, NULL, &supported) == S_OK) && supported)
{
func(displayMode.Get());
}
}
}
// Routes capture to the given physical connector; remembers the selection
// only when the hardware accepts it. Returns the SDK result code.
HRESULT DeckLinkInputDevice::SetInputVideoConnection(BMDVideoConnection connection)
{
    const HRESULT hr = DeckLinkConfig->SetInt(bmdDeckLinkConfigVideoInputConnection, (int64_t)connection);
    if (hr == S_OK)
        SelectedInputConnection = connection;
    return hr;
}
// True when the device's current profile is active (see IsDeviceActive helper).
bool DeckLinkInputDevice::IsActive()
{
return IsDeviceActive(DeckLink);
}
#pragma once
#pragma once
#include <atomic>
#include <functional>
#include <QString>
#include <QObject>
#include <condition_variable>
#include <memory>
#include <mutex>
#include <queue>
#include <thread>
#include "DeckLinkAPI.h"
#include "Utils/CustomEvents.h"
#include "Utils/ComPtr.h"
#include "Utils/Image.h"
#include "Utils/AudioPacket.h"
#include "Threads/CaptureThread.h"
#include "NDI/NDIOutputThread.h"
// COM callback object wrapping one DeckLink capture device. Receives frames
// from the SDK, forwards them to a CaptureThread / NDI output, and notifies
// its owner (via posted QEvents) about capture errors and format changes.
class DeckLinkInputDevice : public QObject, public IDeckLinkInputCallback
{
Q_OBJECT
public:
using DeckLinkDisplayModeQueryFunc = std::function<void(IDeckLinkDisplayMode*)>;
DeckLinkInputDevice(QObject* parent, ComPtr<IDeckLink>& deckLink, int index);
virtual ~DeckLinkInputDevice();
// IUnknown interface
virtual HRESULT QueryInterface(REFIID riid, LPVOID* ppv) override;
virtual ULONG AddRef() override;
virtual ULONG Release() override;
// IDeckLinkInputCallback interface
virtual HRESULT VideoInputFormatChanged(BMDVideoInputFormatChangedEvents notificationEvents, IDeckLinkDisplayMode* newDisplayMode, BMDDetectedVideoInputFormatFlags detectedSignalFlags) override;
virtual HRESULT VideoInputFrameArrived(IDeckLinkVideoInputFrame* videoFrame, IDeckLinkAudioInputPacket* audioPacket) override;
// Other methods
bool Initialize(void);
HRESULT GetDeviceName(QString& deviceName);
bool IsCapturing(void) const { return bCurrentlyCapturing; }
bool SupportsFormatDetection(void) const { return bSupportsFormatDetection; }
BMDVideoConnection GetVideoConnections(void) const { return (BMDVideoConnection)SupportedInputConnections; }
bool IsActive(void);
bool StartCapture(BMDDisplayMode displayMode, IDeckLinkScreenPreviewCallback* screenPreviewCallback, bool applyDetectedInputMode);
void StopCapture(void);
void QuerySupportedVideoModes(DeckLinkDisplayModeQueryFunc func);
HRESULT SetInputVideoConnection(BMDVideoConnection connection);
ComPtr<IDeckLink> GetDeckLinkInstance(void) const { return DeckLink; }
ComPtr<IDeckLinkInput> GetDeckLinkInput(void) const { return DeckLinkInput; }
ComPtr<IDeckLinkConfiguration> GetDeckLinkConfiguration(void) const { return DeckLinkConfig; }
// Destination thread for captured video frames (shared ownership).
void SetCapture(const std::shared_ptr<CaptureThread>& capture) {
Capture = capture;
}
// Optional secondary sink: forwards frames to an NDI output stream.
void SetNDIOutputThread(const std::shared_ptr<NDIOutputThread>& ndiOuptutThread) {
NDIOutput = ndiOuptutThread;
}
//void ForwardThread();
signals:
void ArrivedFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame);
void PushAudioFrame(std::shared_ptr<AudioPacket> image);
private:
std::atomic<ULONG> RefCount; // COM reference count
QObject* Owner; // receives posted error/format-change events
//
ComPtr<IDeckLink> DeckLink;
ComPtr<IDeckLinkInput> DeckLinkInput;
ComPtr<IDeckLinkConfiguration> DeckLinkConfig;
//
bool bSupportsFormatDetection; // hardware supports auto format detection
bool bCurrentlyCapturing;
bool bApplyDetectedInputMode;
bool bLastValidFrameStatus;
int64_t SupportedInputConnections; // bitmask of available input connectors
BMDVideoConnection SelectedInputConnection;
//
std::shared_ptr<CaptureThread> Capture; // video frame sink
std::shared_ptr<NDIOutputThread> NDIOutput; // optional NDI sink
//std::queue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue;
//std::condition_variable cv;
//std::mutex mutex;
//std::thread thd;
int Index; // device index for log output
bool m_pushed; // toggles each frame; forwards every other frame
bool PrevInputSignalAbsent; // previous frame carried no input signal
int m_fps; // frames received in the current 1-second window
uint64_t m_lastRecvTS; // start of the current fps window (ms)
};
// Custom QEvent posted to the owner when input format auto-detection reports
// a new display mode; carries the detected mode.
class DeckLinkInputFormatChangedEvent : public QEvent
{
public:
DeckLinkInputFormatChangedEvent(BMDDisplayMode displayMode) : QEvent(kVideoFormatChangedEvent), DisplayMode(displayMode){}
virtual ~DeckLinkInputFormatChangedEvent(){};
BMDDisplayMode GetDisplayMode() const { return DisplayMode; }
private:
BMDDisplayMode DisplayMode;
};
// Returns true when the DeckLink device's current duplex profile is active.
// FIX: this helper lived in an anonymous namespace inside a header, which
// gives every including TU its own internal-linkage copy (a known header
// anti-pattern). A plain `inline` function keeps call sites identical while
// providing a single ODR-merged definition.
inline bool IsDeviceActive(ComPtr<IDeckLink>& deckLink)
{
	ComPtr<IDeckLinkProfileAttributes> deckLinkAttributes(IID_IDeckLinkProfileAttributes, deckLink);
	int64_t intAttribute;

	if (!deckLinkAttributes)
		return false;
	// Query the duplex state; treat a failed query as "not active".
	if (deckLinkAttributes->GetInt(BMDDeckLinkDuplex, &intAttribute) != S_OK)
		return false;
	return ((BMDDuplexMode)intAttribute) != bmdDuplexInactive;
}
#pragma once
#pragma once
#include <QCheckBox>
#include <QComboBox>
#include <QFormLayout>
#include <QLabel>
#include <functional>
#include "DeckLinkInputDevice.h"
#include "DeckLinkOpenGLWidget.h"
#include "Utils/ComPtr.h"
#include "NDI/NDIOutputThread.h"
#include "Threads/CaptureThread.h"
// UI page for configuring and previewing one DeckLink input: device picker,
// input connection, video format, auto-detect toggle, plus the OpenGL preview.
// Owns the CaptureThread and optional NDI output for the selected device.
class DeckLinkInputPage : public QWidget
{
Q_OBJECT
public:
DeckLinkInputPage();
virtual ~DeckLinkInputPage();
void SetPreviewSize(QSize previewSize, int index);
// Handles the custom events posted by DeckLinkInputDevice (errors, format changes).
void customEvent(QEvent* event) override;
void StartCapture(void);
// Device list bookkeeping driven by hotplug notifications.
void AddDevice(ComPtr<IDeckLink>& deckLink, bool deviceIsActive);
void RemoveDevice(ComPtr<IDeckLink>& deckLink);
void EnableDevice(ComPtr<IDeckLink>& deckLink, bool enable);
bool ReleaseDeviceIfSelected(ComPtr<IDeckLink>& deckLink);
DeckLinkOpenGLWidget* GetPreviewView(void) const { return PreviewView; }
ComPtr<DeckLinkInputDevice> GetSelectedDevice(void) const { return SelectedDevice; }
CaptureThread* GetCapture() { return Capture.get(); }
public slots:
void InputDeviceChanged(int selectedDeviceIndex);
void InputConnectionChanged(int selectedConnectionIndex);
void VideoFormatChanged(int selectedVideoFormatIndex);
void AutoDetectChanged(int autoDetectState);
void RequestedDeviceGranted(ComPtr<IDeckLink>& device);
signals:
void RequestDeckLink(ComPtr<IDeckLink>& device);
void RequestDeckLinkIfAvailable(ComPtr<IDeckLink>& device);
void RelinquishDeckLink(ComPtr<IDeckLink>& device);
void FormatChanged(BMDDisplayMode displayMode);
private slots:
void ObjectNameChanged(const QString& newName);
private:
void RestartCapture(void);
void DetectedVideoFormatChanged(BMDDisplayMode displayMode);
void SelectedDeviceChanged(void);
void RefreshInputConnectionMenu(void);
void RefreshDisplayModeMenu(void);
ComPtr<DeckLinkInputDevice> SelectedDevice; // currently selected capture device
DeckLinkOpenGLWidget* PreviewView; // live preview widget
std::shared_ptr<CaptureThread> Capture; // conversion thread for this page's device
std::shared_ptr<NDIOutputThread> NDIOutput; // optional NDI re-broadcast
QFormLayout* FormLayout;
QComboBox* DeviceListCombo;
QComboBox* InputConnectionCombo;
QComboBox* VideoFormatCombo;
QCheckBox* AutoDetectCheckBox;
QLabel* NDINameLabel;
int Index; // page index (which input slot this page controls)
};
\ No newline at end of file
#pragma once
#pragma once
#include <atomic>
#include <functional>
#include <condition_variable>
#include <list>
#include <memory>
#include <QMutex>
#include <QThread>
#include <QString>
#include <QObject>
#include "DeckLinkAPI.h"
#include "Utils/CustomEvents.h"
#include "Utils/Common.h"
#include "Utils/SampleQueue.h"
#include "Utils/ComPtr.h"
#include "Utils/Platform.h"
#include "DeckLinkOutputVideoFrame.h"
#include "Utils/Image.h"
#include "Utils/AudioPacket.h"
// COM callback object wrapping one DeckLink output device. Consumes Image /
// AudioPacket objects via queued slots and schedules them for playback on
// dedicated worker threads.
class DeckLinkOutputDevice : public QObject, public IDeckLinkVideoOutputCallback
{
Q_OBJECT
enum class PlaybackState { Idle, Starting, Prerolling, Running, Stopping, Stopped };
using ScheduledFrameCompletedCallback = std::function<void(ComPtr<DeckLinkOutputVideoFrame>)>;
using ScheduledFramesList = std::list<ComPtr<DeckLinkOutputVideoFrame>>;
public:
DeckLinkOutputDevice(ComPtr<IDeckLink>& decklink, int videoPrerollSize,int index);
virtual ~DeckLinkOutputDevice() = default;
// IUnknown interface
HRESULT STDMETHODCALLTYPE QueryInterface(REFIID iid, LPVOID* ppv) override;
ULONG STDMETHODCALLTYPE AddRef() override;
ULONG STDMETHODCALLTYPE Release() override;
// IDeckLinkVideoOutputCallback interface
HRESULT STDMETHODCALLTYPE ScheduledFrameCompleted(IDeckLinkVideoFrame* completedFrame, BMDOutputFrameCompletionResult result) override;
HRESULT STDMETHODCALLTYPE ScheduledPlaybackHasStopped() override;
// Other methods
bool StartPlayback(BMDDisplayMode displayMode, bool enable3D, BMDPixelFormat pixelFormat, bool requireReferenceLocked, IDeckLinkScreenPreviewCallback* screenPreviewCallback);
void StopPlayback(void);
void CancelWaitForReference();
BMDTimeScale getFrameTimescale(void) const { return frameTimescale; }
bool getReferenceSignalMode(BMDDisplayMode* mode);
bool isPlaybackActive(void);
void onScheduledFrameCompleted(const ScheduledFrameCompletedCallback& callback) { scheduledFrameCompletedCallback = callback; }
ComPtr<IDeckLink> GetDeckLinkInstance(void) const { return deckLink; }
ComPtr<IDeckLinkOutput> getDeckLinkOutput(void) const { return deckLinkOutput; }
public slots:
// Queued sinks feeding the output: audio packets and converted video frames.
void AddAudioFrame(std::shared_ptr<AudioPacket> audio_packet);
void AddFrame(std::shared_ptr<Image> image);
private:
void InitResource();
private:
std::atomic<ULONG> RefCount; // COM reference count
PlaybackState state;
//
ComPtr<IDeckLink> deckLink;
ComPtr<IDeckLinkOutput> deckLinkOutput;
BMDVideoConnection SelectedOutputConnection;
//
SampleQueue<std::shared_ptr<Image>> outputVideoFrameQueue; // frames awaiting scheduling
SampleQueue<std::shared_ptr<AudioPacket>> outputAudioFrameQueue; // audio awaiting scheduling
//ScheduledFramesList scheduledFramesList;
//
uint32_t videoPrerollSize; // frames to schedule before starting playback
//
BMDTimeValue frameDuration;
BMDTimeScale frameTimescale;
//
bool seenFirstVideoFrame;
BMDTimeValue startPlaybackTime;
bool first_sleep;
//
//std::mutex mutex;
//std::condition_variable playbackStoppedCondition;
//
std::thread scheduleVideoFramesThread; // drains outputVideoFrameQueue
std::thread scheduleAudioFramesThread; // drains outputAudioFrameQueue
//
ScheduledFrameCompletedCallback scheduledFrameCompletedCallback;
// Private methods
void scheduleVideoFramesFunc(void);
void scheduleAudioFramesFunc(void);
void scheduleAudioFramesFuncDeley(void);
void outputAudioFrameFunc(void);
bool waitForReferenceSignalToLock(void);
void checkEndOfPreroll(void);
//
int64_t current_video_time = 0;
int64_t current_sleep_ms = 0;
qint32 Index; // device index for log output
qint32 Device_id;
qint32 output_deley_ms;//
qint32 audio_max_size;
int m_fps; // frames output in the current 1-second window
uint64_t m_lastRecvTS; // start of the current fps window (ms)
};
#include "BlackMagicDesign/DeckLinkOutputVideoFrame.h"
#include "BlackMagicDesign/DeckLinkOutputVideoFrame.h"
#include <chrono>
#include <QDebug>
#include "Utils/Common.h"
#include <immintrin.h>
#include <cstdint>
#if 0
DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame() : RefCount(1), width(-1), height(-1), rowBytes(-1), frameFlags(bmdFrameFlagDefault), pixelFormat(bmdFormat8BitBGRA)
{
}
DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame(int w, int h, BMDFrameFlags flags, BMDPixelFormat pixelFormat) : RefCount(1), width(w), height(h), frameFlags(bmdFrameFlagDefault), pixelFormat(pixelFormat)
{
rowBytes = GetRowBytesFromPixelFormat(width, pixelFormat);
buffer.resize(height * rowBytes);
}
DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame(Image& image)
: RefCount(1),
width(image.GetWidth()),
height(image.GetHegiht()),
frameFlags(image.GetFlags()),
pixelFormat(image.GetPixelFormat())
{
rowBytes = GetRowBytesFromPixelFormat(width, pixelFormat);
buffer.resize(height * rowBytes);
memcpy(buffer.data(), image.GetBytes(), height * rowBytes);
}
#endif
// Zero-copy frame wrapper: keeps shared ownership of the Image instead of
// copying its pixels (the old memcpy-based constructors are compiled out
// above). GetBytes() serves the Image's buffer directly.
DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame(std::shared_ptr<Image> image)
: RefCount(1),
width(image->GetWidth()),
height(image->GetHegiht()),
frameFlags(image->GetFlags()),
pixelFormat(image->GetPixelFormat()),
m_img(image)
{
rowBytes = GetRowBytesFromPixelFormat(width, pixelFormat);
//buffer.resize(height * rowBytes);
//memcpy(buffer.data(), image->GetBytes(), height * rowBytes);
}
#if 0
DeckLinkOutputVideoFrame::DeckLinkOutputVideoFrame(Image&& image)
: RefCount(1),
width(image.GetWidth()),
height(image.GetHegiht()),
frameFlags(image.GetFlags()),
pixelFormat(image.GetPixelFormat())
{
rowBytes = GetRowBytesFromPixelFormat(width, pixelFormat);
buffer.resize(height * rowBytes);
memcpy(buffer.data(), image.GetBytes(), height * rowBytes);
}
#endif
// Nothing to free explicitly: pixel data is released when the shared Image
// (m_img) drops its last reference.
DeckLinkOutputVideoFrame::~DeckLinkOutputVideoFrame()
{
//buffer.clear();
}
// IUnknown::AddRef — atomically increments and returns the new count.
ULONG DeckLinkOutputVideoFrame::AddRef()
{
    return RefCount.fetch_add(1) + 1;
}
// IUnknown::Release — atomically decrements the count and self-deletes at zero.
ULONG DeckLinkOutputVideoFrame::Release()
{
    const ULONG remaining = RefCount.fetch_sub(1) - 1;
    if (remaining == 0)
        delete this;
    return remaining;
}
// IUnknown::QueryInterface — exposes IUnknown and IDeckLinkVideoFrame.
// Any other riid yields E_NOINTERFACE with *ppv cleared.
HRESULT DeckLinkOutputVideoFrame::QueryInterface(REFIID riid, LPVOID* ppv)
{
    if (ppv == nullptr)
        return E_INVALIDARG;

    // Initialize the out-parameter before inspecting riid.
    *ppv = nullptr;

    if (riid == IID_IUnknown)
    {
        *ppv = this;
        AddRef();
        return S_OK;
    }
    if (riid == IID_IDeckLinkVideoFrame)
    {
        *ppv = (IDeckLinkVideoFrame*)this;
        AddRef();
        return S_OK;
    }
    return E_NOINTERFACE;
}
// IDeckLinkVideoFrame: frame width in pixels.
long DeckLinkOutputVideoFrame::GetWidth()
{
	// static_cast instead of a C-style cast (width is a plain int).
	return static_cast<long>(width);
}
// IDeckLinkVideoFrame: frame height in pixels.
long DeckLinkOutputVideoFrame::GetHeight()
{
	// static_cast instead of a C-style cast (height is a plain int).
	return static_cast<long>(height);
}
// IDeckLinkVideoFrame: flags copied from the source Image at construction.
BMDFrameFlags DeckLinkOutputVideoFrame::GetFlags()
{
return frameFlags;
}
// IDeckLinkVideoFrame: pixel format copied from the source Image.
BMDPixelFormat DeckLinkOutputVideoFrame::GetPixelFormat()
{
return pixelFormat;
}
// IDeckLinkVideoFrame: bytes per row, computed from width and pixel format
// in the constructor via GetRowBytesFromPixelFormat.
long DeckLinkOutputVideoFrame::GetRowBytes()
{
return rowBytes;
}
// IDeckLinkVideoFrame: exposes the wrapped Image's pixel buffer (zero-copy).
// NOTE(review): assumes m_img is non-null — true for the only active
// constructor, but there is no guard; confirm no default-constructed path exists.
HRESULT DeckLinkOutputVideoFrame::GetBytes(void** buf)
{
*buf = m_img->GetBytes();
return S_OK;
}
#pragma once
#pragma once
#include <atomic>
#include <vector>
#include "Utils/ComPtr.h"
#include "DeckLinkAPI.h"
#include "Utils/Image.h"
// IDeckLinkVideoFrame implementation that wraps a shared Image without
// copying pixels; used to schedule converted frames for DeckLink output.
class DeckLinkOutputVideoFrame : public IDeckLinkVideoFrame
{
public:
//DeckLinkOutputVideoFrame();
//DeckLinkOutputVideoFrame(int w, int h, BMDFrameFlags flags, BMDPixelFormat pixelFormat);
//DeckLinkOutputVideoFrame(Image& image);
DeckLinkOutputVideoFrame(std::shared_ptr<Image> image);
//DeckLinkOutputVideoFrame(Image&& image);
virtual ~DeckLinkOutputVideoFrame();
// IDeckLinkVideoFrame interface
virtual long STDMETHODCALLTYPE GetWidth();
virtual long STDMETHODCALLTYPE GetHeight();
virtual long STDMETHODCALLTYPE GetRowBytes();
virtual HRESULT STDMETHODCALLTYPE GetBytes(void** buf);
virtual BMDFrameFlags STDMETHODCALLTYPE GetFlags();
virtual BMDPixelFormat STDMETHODCALLTYPE GetPixelFormat();
// Dummy implementations of remaining method in IDeckLinkVideoFrame
virtual HRESULT STDMETHODCALLTYPE GetAncillaryData(IDeckLinkVideoFrameAncillary** ancillary) { return E_NOTIMPL; }
virtual HRESULT STDMETHODCALLTYPE GetTimecode(BMDTimecodeFormat format, IDeckLinkTimecode** timecode) { return E_NOTIMPL; }
// IUnknown interface
virtual HRESULT STDMETHODCALLTYPE QueryInterface(REFIID riid, void** ppv);
virtual ULONG STDMETHODCALLTYPE AddRef();
virtual ULONG STDMETHODCALLTYPE Release();
// Scheduling/diagnostic timestamps recorded by the output pipeline.
void setVideoStreamTime(const BMDTimeValue time) { videoStreamTime = time; }
void setVideoFrameDuration(const BMDTimeValue time) { videoFrameDuration = time; }
void setInputFrameStartReferenceTime(const BMDTimeValue time) { inputFrameStartReferenceTime = time; }
void setInputFrameArrivedReferenceTime(const BMDTimeValue time) { inputFrameArrivedReferenceTime = time; }
void setOutputFrameScheduledReferenceTime(const BMDTimeValue time) { outputFrameScheduledReferenceTime = time; }
void setOutputFrameCompletedReferenceTime(const BMDTimeValue time) { outputFrameCompletedReferenceTime = time; }
void setOutputCompletionResult(const BMDOutputFrameCompletionResult result) { outputFrameCompletionResult = result; }
BMDTimeValue getVideoStreamTime() { return videoStreamTime; }
BMDTimeValue getVideoFrameDuration() { return videoFrameDuration; }
BMDTimeValue getInputFrameStartReferenceTime() { return inputFrameStartReferenceTime; }
BMDTimeValue getInputFrameArrivedReferenceTime() { return inputFrameArrivedReferenceTime; }
BMDTimeValue getOutputFrameScheduledReferenceTime() { return outputFrameScheduledReferenceTime; }
BMDTimeValue getOutputFrameCompletedReferenceTime() { return outputFrameCompletedReferenceTime; }
BMDOutputFrameCompletionResult getOutputCompletionResult() { return outputFrameCompletionResult; }
private:
std::atomic<ULONG> RefCount; // COM reference count
//
int width;
int height;
int rowBytes; // computed from width + pixelFormat at construction
BMDPixelFormat pixelFormat;
BMDFrameFlags frameFlags;
// NOTE(review): unused by the active (zero-copy) constructor; kept only for
// the #if 0 copy-based constructors. Candidate for removal.
std::vector<uint8_t> buffer;
// Timecode
BMDTimeValue videoStreamTime;
BMDTimeValue videoFrameDuration;
BMDTimeValue inputFrameStartReferenceTime;
BMDTimeValue inputFrameArrivedReferenceTime;
BMDTimeValue outputFrameScheduledReferenceTime;
BMDTimeValue outputFrameCompletedReferenceTime;
BMDOutputFrameCompletionResult outputFrameCompletionResult;
std::shared_ptr<Image> m_img; // shared pixel data served by GetBytes()
};
#include "Utils/Image.h"
#include "Utils/Image.h"
#include "Utils/ComPtr.h"
#include "opencv2/core.hpp"
#include "Utils/Common.h"
/*
Image::Image(IDeckLinkVideoInputFrame* videoFrame)
{
#if USE_4K
if (videoFrame->GetWidth() != 3840 || videoFrame->GetHeight() != 2160)
{
return;
}
#else
if(videoFrame->GetWidth() != 1920 || videoFrame->GetHeight() != 1080)
{
return;
}
#endif
inVideoFrame = MakeComPtr<DeckLinkInputVideoFrame>(videoFrame);
ConvertDeckLinkVideoFrame2Mat(inVideoFrame, mat);
}
Image::Image(ComPtr<DeckLinkInputVideoFrame> videoFrame)
{
#if USE_4K
if (videoFrame->GetWidth() != 3840 || videoFrame->GetHeight() != 2160)
{
return;
}
#else
if (videoFrame->GetWidth() != 1920 || videoFrame->GetHeight() != 1080)
{
return;
}
#endif
inVideoFrame = videoFrame;
ConvertDeckLinkVideoFrame2Mat(videoFrame, mat);
}
Image::Image(ComPtr<IDeckLinkVideoInputFrame> videoFrame)
{
#if USE_4K
if (videoFrame->GetWidth() != 3840 || videoFrame->GetHeight() != 2160)
{
return;
}
#else
if (videoFrame->GetWidth() != 1920 || videoFrame->GetHeight() != 1080)
{
return;
}
#endif
inVideoFrame = MakeComPtr<DeckLinkInputVideoFrame>(videoFrame.Get());
ConvertDeckLinkVideoFrame2Mat(inVideoFrame, mat);
}*/
// Construct an Image that wraps an already-decoded frame buffer.
// @param frame        shared buffer holding the raw pixel data (not copied; ownership is shared)
// @param w            frame width in pixels
// @param h            frame height in pixels
// @param pixFmt       DeckLink pixel format of the buffer contents
// @param curtimestamp capture timestamp in milliseconds of the source frame
// @param sequence     monotonically increasing capture sequence number
Image::Image(std::shared_ptr<AVBuff> frame, int w, int h, BMDPixelFormat pixFmt, const qint64& curtimestamp, const qint64& sequence):
m_frame(frame),
m_width(w),
m_height(h),
m_pixFmt(pixFmt),
inputFrameCurTimeStamp(curtimestamp),
sequenceNum(sequence)
{
}
/*
Image::Image(const Image& other)
{
}
Image::Image(Image&& other)
{
mat = other.mat;
}
Image& Image::operator=(const Image& other)
{
return *this;
}
Image& Image::operator=(Image&& other)
{
mat = other.mat;
return *this;
}*/
// Nothing to release explicitly: m_frame is a shared_ptr, so the pixel
// buffer is freed automatically when the last Image referencing it dies.
Image::~Image()
{
}
// True when this Image actually wraps a pixel buffer; a default-constructed
// Image (or one whose buffer was never attached) reports false.
bool Image::IsValid() const
{
    // shared_ptr converts to bool directly; the `? true : false` was redundant.
    return static_cast<bool>(m_frame);
}
// Raw pixel bytes of the wrapped buffer, or nullptr when the Image is
// invalid. Previously this dereferenced m_frame unconditionally and crashed
// if the caller skipped the IsValid() check.
uint8_t* Image::GetBytes() const
{
    return m_frame ? m_frame->m_data : nullptr;
}
#pragma once
#pragma once
#include <QtCore/QtCore>
#include <memory>
#include "opencv2/core.hpp"
#include "opencv2/imgproc.hpp"
#include "DeckLinkAPI.h"
#include "BlackMagicDesign/DeckLinkInputVideoFrame.h"
#include "Utils//AVBuffer.h"
// One captured video frame: a shared pixel buffer plus geometry, pixel
// format, DeckLink timing metadata and the capture timestamp / sequence
// number. Copying an Image shares the underlying AVBuff (shallow copy of
// the pixels, deep copy of the metadata).
class Image : public QObject
{
	Q_OBJECT
public:
	// Default-constructed Image is "empty": IsValid() returns false and all
	// scalar members are zeroed by the in-class initializers below
	// (previously they were left uninitialized).
	Image() {}
	Image(std::shared_ptr<AVBuff> frame, int w, int h, BMDPixelFormat pixFmt, const qint64& curtimestamp, const qint64& sequence);
	Image(const Image& other)
	{
		m_frame = other.m_frame;
		m_width = other.m_width;
		m_height = other.m_height;
		m_pixFmt = other.m_pixFmt;
		inputFrameCurTimeStamp = other.inputFrameCurTimeStamp;
		sequenceNum = other.sequenceNum;
		// Timing metadata now travels with the copy as well (the old copy
		// constructor silently dropped these fields).
		videoStreamTime = other.videoStreamTime;
		videoFrameDuration = other.videoFrameDuration;
		inputFrameStartReferenceTime = other.inputFrameStartReferenceTime;
		inputFrameArrivedReferenceTime = other.inputFrameArrivedReferenceTime;
		outputFrameScheduledReferenceTime = other.outputFrameScheduledReferenceTime;
		outputFrameCompletedReferenceTime = other.outputFrameCompletedReferenceTime;
		outputFrameCompletionResult = other.outputFrameCompletionResult;
	};
	~Image();

	// Raw pixel bytes of the wrapped buffer (nullptr-safe; see Image.cpp).
	uint8_t* GetBytes() const;
	// True when a pixel buffer is attached.
	bool IsValid() const;
	int GetWidth() { return m_width; }
	// Correctly spelled accessor; prefer this in new code.
	int GetHeight() { return m_height; }
	// Misspelled legacy accessor kept so existing callers keep compiling.
	int GetHegiht() { return GetHeight(); }
	BMDPixelFormat GetPixelFormat() { return m_pixFmt; }
	BMDFrameFlags GetFlags() { return bmdFrameFlagDefault; }

	// DeckLink timing metadata carried alongside the pixels.
	void setVideoStreamTime(const BMDTimeValue time) { videoStreamTime = time; }
	void setVideoFrameDuration(const BMDTimeValue time) { videoFrameDuration = time; }
	void setInputFrameStartReferenceTime(const BMDTimeValue time) { inputFrameStartReferenceTime = time; }
	void setInputFrameArrivedReferenceTime(const BMDTimeValue time) { inputFrameArrivedReferenceTime = time; }
	void setOutputFrameScheduledReferenceTime(const BMDTimeValue time) { outputFrameScheduledReferenceTime = time; }
	void setOutputFrameCompletedReferenceTime(const BMDTimeValue time) { outputFrameCompletedReferenceTime = time; }
	void setOutputCompletionResult(const BMDOutputFrameCompletionResult result) { outputFrameCompletionResult = result; }
	BMDTimeValue getVideoStreamTime() { return videoStreamTime; }
	BMDTimeValue getVideoFrameDuration() { return videoFrameDuration; }
	BMDTimeValue getInputFrameStartReferenceTime() { return inputFrameStartReferenceTime; }
	BMDTimeValue getInputFrameArrivedReferenceTime() { return inputFrameArrivedReferenceTime; }
	BMDTimeValue getOutputFrameScheduledReferenceTime() { return outputFrameScheduledReferenceTime; }
	BMDTimeValue getOutputFrameCompletedReferenceTime() { return outputFrameCompletedReferenceTime; }
	BMDOutputFrameCompletionResult getOutputCompletionResult() { return outputFrameCompletionResult; }
	int64_t getInputFrameCurTimeStamp() { return inputFrameCurTimeStamp; }
	int64_t getInputFrameSequence() { return sequenceNum; }

private:
	// Timecode / reference-time bookkeeping (zero until the setters run).
	BMDTimeValue videoStreamTime = 0;
	BMDTimeValue videoFrameDuration = 0;
	BMDTimeValue inputFrameStartReferenceTime = 0;
	BMDTimeValue inputFrameArrivedReferenceTime = 0;
	BMDTimeValue outputFrameScheduledReferenceTime = 0;
	BMDTimeValue outputFrameCompletedReferenceTime = 0;
	qint64 inputFrameCurTimeStamp = 0; // capture timestamp (ms)
	qint64 sequenceNum = 0;            // capture sequence number
	BMDOutputFrameCompletionResult outputFrameCompletionResult{};
	int m_width = 0;
	int m_height = 0;
	BMDPixelFormat m_pixFmt{};
	std::shared_ptr<AVBuff> m_frame;
};
HRESULT ConvertDeckLinkVideoFrame2Mat(ComPtr<DeckLinkInputVideoFrame> videoFrame, cv::Mat& imageFrame);
\ No newline at end of file
#include <csignal>
#include <csignal>
#include <cstddef>
#include <cstring>
#include <cstdint>
#include <cstdio>
#include <cstdlib>
#include <atomic>
#include "NDI/NDIOutputThread.h"
#include "Utils/Common.h"
#include "libyuv.h"
extern qint64 StartTimeStamp;
// Construct an NDI sender named `Name` producing w x h output.
// The scaler converts 3840x2160 UYVY down to 1920x1080 UYVY; run() feeds
// every queued frame through it before transmission.
NDIOutputThread::NDIOutputThread(const QString& Name, int w, int h) : NDISenderName(Name), width(w), height(h), isSending(false), Instance(nullptr),
m_lastTS(TimeMilliSecond()),
m_fps(0),
m_seqNum(0)
{
m_scale = new VideoScale(3840, 2160, AV_PIX_FMT_UYVY422, 1920, 1080, AV_PIX_FMT_UYVY422);
}
// Tear down the NDI sender and the scaler.
// Frame.p_data is deliberately NOT freed here: Init() no longer mallocs a
// persistent buffer (that line is commented out) and run() releases each
// scaler output with av_free() right after sending, so the old
// free(Frame.p_data) operated on a dangling pointer (double free / UB).
NDIOutputThread::~NDIOutputThread()
{
    if (Instance != nullptr)
        NDIlib_send_destroy(Instance);
    delete m_scale;
}
// Rename the sender. Takes effect the next time Init() creates the
// NDI instance; the live instance keeps its current name.
void NDIOutputThread::SetNDISenderName(const QString& Name)
{
NDISenderName = Name;
}
// Current configured sender name (returned by value).
QString NDIOutputThread::GetNDISenderName() const
{
return NDISenderName;
}
// Update the output frame geometry; non-positive dimensions are ignored.
// Like the name, this only takes effect on the next Init().
void NDIOutputThread::SetNDIImageSize(int w, int h)
{
    if (w > 0 && h > 0)
    {
        width = w;
        height = h;
    }
}
// Create (or re-create) the NDI sender instance and pre-fill the static
// fields of the outgoing video-frame descriptor.
// Returns false if the configured parameters are invalid or creation fails.
bool NDIOutputThread::Init()
{
// Init NDIlib with static lib
if (!CheckValid())
return false;
// Re-initialization path: destroy the previous sender first.
if(Instance != nullptr)
{
// NOTE(review): the persistent malloc of Frame.p_data below is commented
// out and run() av_free()s the scaler buffer after every send, so this
// free() acts on a stale/dangling pointer on re-init — confirm and remove.
free(Frame.p_data);
NDIlib_send_destroy(Instance);
}
NDIlib_send_create_t NDI_Send_Create_Desc;
// strname must stay alive until NDIlib_send_create() returns; presumably
// the library copies the name during creation — TODO confirm against SDK.
std::string strname = NDISenderName.toStdString();
NDI_Send_Create_Desc.p_ndi_name = strname.c_str();
Instance = NDIlib_send_create(&NDI_Send_Create_Desc);
if (!Instance) return false;
// Provide a meta-data registration that allows people to know what we are. Note that this is optional.
// Note that it is possible for senders to also register their preferred video formats.
//NDIlib_metadata_frame_t NDI_connection_type;
//NDIlib_send_add_connection_metadata(Instance, &NDI_connection_type);
// Static frame description: packed UYVY 4:2:2 (2 bytes/pixel stride),
// 25 fps progressive, 16:9. p_data is filled per-frame in run().
Frame.xres = width;
Frame.yres = height;
Frame.FourCC = NDIlib_FourCC_type_UYVY;
Frame.line_stride_in_bytes = Frame.xres * 2;
//Frame.p_data = (uint8_t*)malloc(Frame.xres * Frame.yres * 2);
Frame.frame_rate_D = 1;
Frame.frame_rate_N = 25;
Frame.frame_format_type = NDIlib_frame_format_type_progressive;
Frame.picture_aspect_ratio = 16.0 / 9;
//Frame.timecode = NDIlib_send_timecode_synthesize;
//Frame.timestamp = 0;
Frame.p_metadata = nullptr;
return true;
}
void NDIOutputThread::run()
{
ComPtr<IDeckLinkVideoInputFrame> frame;
void* srcBuff;
uint8_t* dstBuff;
int dstBuffSize;
uint64_t currTime, deltaTime;
if (!Init())
return;
while (true)
{
if (taskQueue.WaitFor(frame))
{
frame->GetBytes(&srcBuff);
m_scale->scale((uint8_t*)srcBuff, 0, &dstBuff, &dstBuffSize);
Frame.p_data = dstBuff;
Frame.timestamp = ++m_seqNum;
NDIlib_send_send_video_v2(Instance, &Frame);
av_free(dstBuff);
taskQueue.Pop(frame);
}
m_fps++;
currTime = TimeMilliSecond();
deltaTime = currTime - m_lastTS;
if (deltaTime >= 1000)
{
qDebug() << GetCurrDateTimeStr() << " NDI ouptut fps " << m_fps << ", qsize " << taskQueue.Size() << "\n";
m_fps = 0;
m_lastTS = currTime;
}
}
}
// Legacy implementation, compile-time disabled: consumed shared_ptr<Image>
// frames and scaled BGRA with libyuv before the switch to the
// VideoScale/UYVY pipeline in the active run() above. Kept for reference.
#if 0
void NDIOutputThread::run()
{
if (!Init())
return;
while(true)
{
START_WAIT_TIME_COUNTER
std::shared_ptr<Image> frame;
if(taskQueue.WaitFor(frame))
{
END_WAIT_TIME_COUNTER
START_TIME_COUNTER
if(frame->IsValid())
{
#if USE_4K
libyuv::ARGBScale(frame->GetBytes(), 3840 << 2, 3840, 2160,
Frame.p_data, 1920 << 2, 1920, 1080, libyuv::FilterMode::kFilterNone);
#else
frame->Fill(Frame.p_data, Frame.xres * Frame.yres * 4);
#endif
//qDebug() << "wgt-----------------------------"<<"ndi(" << NDISenderName << ")timestamp : " << frame->getInputFrameCurTimeStamp() - StartTimeStamp << "\n";
//Frame.timestamp = frame->getInputFrameCurTimeStamp();
Frame.timestamp = frame->getInputFrameSequence();
NDIlib_send_send_video_v2(Instance, &Frame);
}
taskQueue.Pop(frame);
END_TIME_COUNTER
}
}
}
#endif
// Discard all frames currently queued for sending.
void NDIOutputThread::Clear()
{
taskQueue.Reset();
}
// Slot: enqueue one captured frame for the sender thread.
// Null frames are dropped so run() never dereferences an empty ComPtr
// (CaptureThread::AddFrame applies the same guard).
// NOTE(review): unlike CaptureThread, this queue has no size cap — it can
// grow unbounded if run() falls behind; confirm whether a drop policy is
// wanted here.
void NDIOutputThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> frame)
{
    START_SLOT_TIME_COUNTER
    if (frame == nullptr)
        return;
    taskQueue.Push(frame);
    END_SLOT_TIME_COUNTER
}
#pragma once
#pragma once
#include <QThread>
#include <QMutex>
#include <QWaitCondition>
#include <queue>
#include "Processing.NDI.Advanced.h"
#include "Utils/Image.h"
#include "Utils/SampleQueue.h"
#include "Utils/VideoScale.h"
// QThread that owns one NDI sender: frames arrive via the AddFrame() slot,
// and run() scales and transmits them (see NDIOutputThread.cpp).
class NDIOutputThread : public QThread
{
	Q_OBJECT
public:
	NDIOutputThread(const QString& Name, int w, int h);
	~NDIOutputThread();
	void SetNDISenderName(const QString& Name);
	QString GetNDISenderName() const;
	void SetNDIImageSize(int w, int h);
	int GetWidth() { return width; }
	int GetHeight() { return height; }
	bool IsSending() const { return isSending; }
	// A sender is usable only with positive geometry and a non-empty name.
	bool CheckValid() const { return width > 0 && height > 0 && !NDISenderName.isEmpty(); }
	void Clear();

public slots:
	void AddFrame(ComPtr<IDeckLinkVideoInputFrame> frame);

private:
	// Default construction yields the safe "empty" state via the in-class
	// initializers below (the old handwritten init list left m_scale,
	// m_lastTS, m_fps and m_seqNum uninitialized and was ordered
	// inconsistently with the declarations).
	NDIOutputThread() {}
	bool Init();
	virtual void run() override;

	QString NDISenderName;
	int width = -1;
	int height = -1;
	bool isSending = false;
	SampleQueue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue;
	NDIlib_send_instance_t Instance = nullptr;
	NDIlib_video_frame_v2_t Frame;
	VideoScale* m_scale = nullptr;   // owned; deleted in the destructor
	uint64_t m_lastTS = 0;           // last fps-log timestamp (ms)
	int m_fps = 0;                   // frames sent since m_lastTS
	uint64_t m_seqNum = 0;           // outgoing NDI frame sequence number
};
\ No newline at end of file
#pragma once
#pragma once
#include <QThread>
#include <QMutex>
#include <QWaitCondition>
#include <QUdpSocket>
#include <QJsonDocument>
#include <map>
#include "Utils/SampleQueue.h"
#include "Utils/Image.h"
const QString MODE_CLEAR = "no_mode";
const QString MODE_CROP = "crop_roi";
const QString MODE_STOP = "stop";
const QString MODE_ACK = "checked_ok";
#define CROP1080WIDTH 810
#define CROP720WIDTH 720
#define CROPHEIGHT 1080
class RoiMessage
{
public:
#if USE_1080P
RoiMessage() : w(CROP1080WIDTH), h(CROPHEIGHT)
{
x = 1920 / 2 - w / 2;
y = 1080 / 2 - h / 2;
timecode = 0;
}
#else
RoiMessage() : w(CROP720WIDTH), h(CROPHEIGHT)
{
x = 1920 / 2 - w / 2;
y = 1080 / 2 - h / 2;
timecode = 0;
}
#endif
RoiMessage(QByteArray& data)
{
QJsonDocument document = QJsonDocument::fromJson(data);
QJsonObject object;
if (document.isObject())
{
object = document.object();
mode = object.value("signal").toString();
QJsonArray roi = object.value("roi").toArray();
int minx = roi[0].toInt();
int miny = roi[1].toInt();
int maxx = roi[2].toInt();
int maxy = roi[3].toInt();
id = object.value("id").toInt();
width = object.value("width").toInt();
height = object.value("height").toInt();
timecode = QString::number(object.value("timecode").toDouble(), 'f', 0).toLongLong();
w = maxx - minx;
h = maxy - miny;
x = minx;
y = miny;
}
}
RoiMessage(QByteArray&& data)
{
QJsonDocument document = QJsonDocument::fromJson(data);
QJsonObject object;
if (document.isObject())
{
object = document.object();
mode = object.value("signal").toString();
QJsonArray roi = object.value("roi").toArray();
int minx = roi[0].toInt();
int miny = roi[1].toInt();
int maxx = roi[2].toInt();
int maxy = roi[3].toInt();
id = object.value("id").toInt();
width = object.value("width").toInt();
height = object.value("height").toInt();
timecode = QString::number(object.value("timecode").toDouble(), 'f', 0).toLongLong();
w = maxx - minx;
h = maxy - miny;
x = minx;
y = miny;
}
}
RoiMessage(const RoiMessage& other) : x(other.x), y(other.y), w(other.w), h(other.h), timecode(other.timecode)
{
}
RoiMessage(RoiMessage&& other)
{
x = other.x;
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
}
RoiMessage operator=(const RoiMessage& other)
{
x = other.x;
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
return *this;
}
RoiMessage operator=(RoiMessage&& other)
{
x = other.x;
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
return *this;
}
RoiMessage(int X, int Y, int W, int H)
:x(X),
y(Y),
w(W),
h(H),
timecode(0) {}
bool IsValid()
{
return x > 0 && y > 0 && w > 0 && h > 0;
}
void SetX(int x_)
{
this->x = x_;
}
int X() { return x; }
int Y() { return y; }
int Width() { return w; }
int Height() { return h; }
qint64 Timecode() { return timecode; }
private:
int x;
int y;
int w;
int h;
QString mode;
qint64 timecode;
int id = 0;
int width;
int height;
};
// Thread that pairs incoming frames with ROI rectangles received over UDP
// and cuts each frame to its ROI (WorkCutImage*), re-emitting the result
// via the PushFrame signal.
class ProcessThread : public QThread
{
Q_OBJECT
public:
ProcessThread();
~ProcessThread();
// Bind udpSocket to hostAddr:hostPort so ROI datagrams can be received.
void SetUpUDP(const QString hostAddr, const QString hostPort);
public slots:
// Queue one captured frame for cropping.
void AddFrame(std::shared_ptr<Image> image);
// Drain pending ROI datagrams from udpSocket (legacy and new variants).
void ReadDatagrams();
void ReadDatagramsNew();
signals:
// Emitted once per cropped output frame.
void PushFrame(std::shared_ptr<Image> image);
protected:
void run() override;
private:
// Cut pImage down to roi (1080p-sized and 720p-sized crop variants).
void WorkCutImage(std::shared_ptr<Image>& pImage, RoiMessage& roi);
void WorkCutImage720p(std::shared_ptr<Image>& pImage, RoiMessage& roi);
// Alternative processing loops selected inside run().
void cutRun();
void cutRunFront();
void cutRunFrontFixedRegion();
private:
//SampleQueue<Image> taskPrerollQueue;
SampleQueue<std::shared_ptr<Image>> taskImageQueue;   // frames awaiting a matching ROI
SampleQueue<RoiMessage> taskROIQueue;                 // ROIs awaiting a matching frame
std::map<qint64, RoiMessage> cutRuleMap;              // timecode -> ROI lookup
QUdpSocket* udpSocket;
//
uint32_t videoPrerollSize;
RoiMessage lastReceiveMessage;  // fallback ROI when no fresh datagram arrives
void Clear();
static int s_count;             // instances created so far (gives idx)
int idx;
int output_deley_time;
int sendFrames;                 // counters for output-fps logging
int64_t sendStartTime;
int recvROIs;                   // counters for ROI-receive-rate logging
int64_t recvROIStartTime;
int continuousLostNums;// NOTE(review): original comment was mojibake; presumably counts consecutive frames with no UDP ROI received — confirm
int minTaskImageQueueSize;
bool firstMinSize = false;
int m_fps;
uint64_t m_lastRecvTS;
};
\ No newline at end of file
优化点:
优化点:
1、从板卡接收8BitYUV,降低处理数据量级
2、NDI直接输出YUV,省去YUV到BGRA的转换
3、使用单独线程缩放4K到1080
4、使用FFmpeg代替SDI缩放接口
5、移除Mat,优化Image图像数据存储
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment