Commit 46675251 by 景炳强

1. add 720p output mode 2. add audio capture/output 3. sync frames by timestamp 4. delay output

parent 8306c261
CaptureThread::AddFrame::fps:: 0 10
ProcessThread::run::fps:: 0 7
CaptureThread::AddFrame::fps:: 2 11
ProcessThread::run::fps:: 2 6
ProcessThread::run::fps:: 3 6
CaptureThread::AddFrame::fps:: 3 11
CaptureThread::AddFrame::fps:: 1 14
ProcessThread::run::fps:: 1 0
CaptureThread::AddFrame::fps:: 0 20
CaptureThread::AddFrame::fps:: 2 19
ProcessThread::run::fps:: 0 15
ProcessThread::run::fps:: 3 15
ProcessThread::run::fps:: 2 15
CaptureThread::AddFrame::fps:: 3 19
CaptureThread::AddFrame::fps:: 1 19
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -131,6 +131,7 @@
<ClInclude Include="include\BlackMagicDesign\ReferenceTime.h" />
<QtMoc Include="include\Threads\ProcessThread.h" />
<ClInclude Include="include\Orbit.h" />
<QtMoc Include="include\Utils\AudioPacket.h" />
<ClInclude Include="include\Utils\Common.h" />
<ClInclude Include="include\Utils\DispatchQueue.h" />
<ClInclude Include="include\Utils\LatencyStatistics.h" />
......@@ -174,6 +175,7 @@
</ClCompile>
<ClCompile Include="src\Threads\CaptureThread.cpp" />
<ClCompile Include="src\Threads\ProcessThread.cpp" />
<ClCompile Include="src\Utils\AudioPacket.cpp" />
<ClCompile Include="src\Utils\Common.cpp" />
<ClCompile Include="src\Utils\Image.cpp" />
<ClCompile Include="src\Utils\LatencyStatistics.cpp" />
......
......@@ -89,6 +89,9 @@
<QtMoc Include="include\Threads\ProcessThread.h">
<Filter>Header Files\Threads</Filter>
</QtMoc>
<QtMoc Include="include\Utils\AudioPacket.h">
<Filter>Header Files\Utils</Filter>
</QtMoc>
</ItemGroup>
<ItemGroup>
<ClInclude Include="include\stdafx.h">
......@@ -223,6 +226,9 @@
<ClCompile Include="src\Threads\ProcessThread.cpp">
<Filter>Source Files\Threads</Filter>
</ClCompile>
<ClCompile Include="src\Utils\AudioPacket.cpp">
<Filter>Source Files\Utils</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<QtUic Include="Form\MomentaMedia.ui">
......
......@@ -13,6 +13,7 @@
#include "Utils/CustomEvents.h"
#include "Utils/ComPtr.h"
#include "Utils/Image.h"
#include "Utils/AudioPacket.h"
#include "Threads/CaptureThread.h"
class DeckLinkInputDevice : public QObject, public IDeckLinkInputCallback
......@@ -59,7 +60,7 @@ public:
signals:
void ArrivedFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame);
void PushAudioFrame(std::shared_ptr<AudioPacket> image);
private:
std::atomic<ULONG> RefCount;
QObject* Owner;
......
......@@ -17,6 +17,7 @@
#include "Utils/Platform.h"
#include "DeckLinkOutputVideoFrame.h"
#include "Utils/Image.h"
#include "Utils/AudioPacket.h"
class DeckLinkOutputDevice : public QObject, public IDeckLinkVideoOutputCallback
{
......@@ -56,6 +57,7 @@ public:
ComPtr<IDeckLinkOutput> getDeckLinkOutput(void) const { return deckLinkOutput; }
public slots:
void AddAudioFrame(std::shared_ptr<AudioPacket> audio_packet);
void AddFrame(std::shared_ptr<Image> image);
private:
......@@ -67,6 +69,7 @@ private:
BMDVideoConnection SelectedOutputConnection;
//
SampleQueue<std::shared_ptr<Image>> outputVideoFrameQueue;
SampleQueue<std::shared_ptr<AudioPacket>> outputAudioFrameQueue;
//ScheduledFramesList scheduledFramesList;
//
uint32_t videoPrerollSize;
......@@ -76,16 +79,20 @@ private:
//
bool seenFirstVideoFrame;
BMDTimeValue startPlaybackTime;
bool first_sleep;
//
//std::mutex mutex;
//std::condition_variable playbackStoppedCondition;
//
std::thread scheduleVideoFramesThread;
std::thread scheduleAudioFramesThread;
//
ScheduledFrameCompletedCallback scheduledFrameCompletedCallback;
// Private methods
void scheduleVideoFramesFunc(void);
void scheduleAudioFramesFunc(void);
void outputAudioFrameFunc(void);
bool waitForReferenceSignalToLock(void);
void checkEndOfPreroll(void);
};
......@@ -19,12 +19,17 @@ public:
~CaptureThread();
public slots:
void AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame);
void AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame,const qint64& timestamp);
signals:
void PushFrame(std::shared_ptr<Image> image);
private:
typedef struct videoFrameData
{
ComPtr<IDeckLinkVideoInputFrame> video_frame;
qint64 timestamp;
}videoFrameData;
static int s_count;
int idx;
//SampleQueue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue;
......@@ -35,6 +40,7 @@ private:
int64_t recvStartTime;
std::queue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue;
std::queue<videoFrameData> taskVideoQueue;
std::condition_variable cv;
std::mutex mutex;
};
\ No newline at end of file
......@@ -5,6 +5,7 @@
#include <QWaitCondition>
#include <QUdpSocket>
#include <QJsonDocument>
#include <map>
#include "Utils/SampleQueue.h"
#include "Utils/Image.h"
......@@ -14,20 +15,36 @@ const QString MODE_CROP = "crop_roi";
const QString MODE_STOP = "stop";
const QString MODE_ACK = "checked_ok";
#define CROP1080WIDTH 810
#define CROP720WIDTH 720
#define CROPHEIGHT 1080
#define USE_1080P 1
class RoiMessage
{
public:
RoiMessage() : w(800), h(1080)
#if USE_1080P
RoiMessage() : w(CROP1080WIDTH), h(CROPHEIGHT)
{
x = 1920 / 2 - w / 2;
y = 1080 / 2 - h / 2;
timecode = 0;
}
#else
RoiMessage() : w(CROP720WIDTH), h(CROPHEIGHT)
{
x = 1920 / 2 - w / 2;
y = 1080 / 2 - h / 2;
timecode = 0;
}
#endif
RoiMessage(QByteArray& data)
{
QJsonDocument document = QJsonDocument::fromJson(data);
QJsonObject object;
if(document.isObject())
if (document.isObject())
{
object = document.object();
mode = object.value("signal").toString();
......@@ -40,6 +57,7 @@ public:
width = object.value("width").toInt();
height = object.value("height").toInt();
timecode = QString::number(object.value("timecode").toDouble(), 'f', 0).toLongLong();
w = maxx - minx;
h = maxy - miny;
......@@ -65,6 +83,7 @@ public:
width = object.value("width").toInt();
height = object.value("height").toInt();
timecode = QString::number(object.value("timecode").toDouble(), 'f', 0).toLongLong();
w = maxx - minx;
h = maxy - miny;
......@@ -73,7 +92,7 @@ public:
}
}
RoiMessage(const RoiMessage& other) : x(other.x), y(other.y), w(other.w), h(other.h)
RoiMessage(const RoiMessage& other) : x(other.x), y(other.y), w(other.w), h(other.h), timecode(other.timecode)
{
}
......@@ -84,6 +103,7 @@ public:
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
}
RoiMessage operator=(const RoiMessage& other)
......@@ -92,6 +112,7 @@ public:
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
return *this;
}
......@@ -101,6 +122,7 @@ public:
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
return *this;
}
......@@ -109,10 +131,16 @@ public:
return x > 0 && y > 0 && w > 0 && h > 0;
}
void SetX(int x_)
{
this->x = x_;
}
int X() { return x; }
int Y() { return y; }
int Width() { return w; }
int Height() { return h; }
qint64 Timecode() { return timecode; }
private:
int x;
......@@ -121,6 +149,7 @@ private:
int h;
QString mode;
qint64 timecode;
int id = 0;
int width;
int height;
......@@ -138,17 +167,22 @@ public:
public slots:
void AddFrame(std::shared_ptr<Image> image);
void ReadDatagrams();
void ReadDatagramsNew();
signals:
void PushFrame(std::shared_ptr<Image> image);
protected:
void run() override;
private:
void WorkCutImage(std::shared_ptr<Image>& pImage, RoiMessage& roi);
void WorkCutImage720p(std::shared_ptr<Image>& pImage, RoiMessage& roi);
void cutRun();
private:
//SampleQueue<Image> taskPrerollQueue;
SampleQueue<std::shared_ptr<Image>> taskImageQueue;
SampleQueue<RoiMessage> taskROIQueue;
std::map<qint64, RoiMessage> cutRuleMap;
QUdpSocket* udpSocket;
......@@ -166,4 +200,7 @@ private:
int recvROIs;
int64_t recvROIStartTime;
int continuousLostNums; // NOTE(review): original comment was mojibake — presumably the count of consecutive frames with no UDP ROI message received; confirm with the author
};
\ No newline at end of file
#pragma once
#include <QtCore/QtCore>
#include <QMetaType>
#include "DeckLinkAPI.h"
// Snapshot of one DeckLink audio input packet. The PCM payload is copied out
// of the driver-owned buffer so the packet can be queued across threads and
// emitted/consumed after the capture callback has returned.
class AudioPacket : public QObject
{
Q_OBJECT
public:
// Copies the sample data out of |audioPacket| and records |timestamp|
// (host milliseconds-since-epoch at capture) in frame_time_stamp.
// A null |audioPacket| yields an empty audio_data.
// NOTE(review): |timestamp| is taken by non-const reference but is never
// modified — it could be passed by value; confirm before changing, as the
// definition in AudioPacket.cpp must match.
AudioPacket(IDeckLinkAudioInputPacket* audioPacket,qint64& timestamp);
~AudioPacket();
public:
QByteArray audio_data;   // raw interleaved PCM copied from the packet
qint32 sample;           // number of sample frames in audio_data
qint32 channel;          // channel count (the .cpp constructor sets 2)
qint32 size;             // byte size of audio_data (sample * 2 bytes * channel)
qint64 frame_time_stamp; // host capture timestamp, ms since epoch
};
......@@ -20,8 +20,12 @@ public:
void Push(const T& sample);
void Push(T&& sample);
bool Pop(T& sample);
bool Pop();
bool Front(T& sample);
bool WaitFor(T& sample);
bool WaitFor();
bool WaitUntil(T& sample, int timeout);
bool WaitUntil(int timeout);
void CancelWaiters(void);
void Reset(void);
......@@ -104,6 +108,28 @@ bool SampleQueue<T>::Pop(T& sample)
}
// Discard the front sample, if any, without returning it.
// Returns false when the queue held nothing to remove.
template <typename T>
bool SampleQueue<T>::Pop()
{
	const std::lock_guard<std::mutex> guard(mutex);
	const bool hadSample = !queue.empty();
	if (hadSample)
		queue.pop();
	return hadSample;
}
// Copy the front sample into |sample| without removing it from the queue.
// Returns false (leaving |sample| untouched) when the queue is empty.
template <typename T>
bool SampleQueue<T>::Front(T& sample)
{
	const std::lock_guard<std::mutex> guard(mutex);
	if (!queue.empty())
	{
		// Deliberately a copy (not a move): the element stays queued for
		// the eventual Pop().
		sample = queue.front();
		return true;
	}
	return false;
}
template <typename T>
bool SampleQueue<T>::WaitFor(T& sample)
{
// Blocking wait for sample
......@@ -121,6 +147,38 @@ bool SampleQueue<T>::WaitFor(T& sample)
}
// Block until at least one sample is queued or the wait is cancelled via
// CancelWaiters(). Returns true when a sample is available, false when the
// wait was cancelled. The sample is NOT consumed — pair with Front()/Pop().
template <typename T>
bool SampleQueue<T>::WaitFor()
{
	std::unique_lock<std::mutex> locker(mutex);
	queueCondition.wait(locker, [&] { return !queue.empty() || waitCancelled; });
	// The predicate only releases the wait when the queue is non-empty or the
	// wait was cancelled, so a further emptiness check (the original dead
	// `else if` / duplicate `return true`) is unnecessary.
	return !waitCancelled;
}
// Block until a sample is queued, the wait is cancelled, or |timeout|
// milliseconds elapse. Returns true only when a sample is available and the
// wait was not cancelled; on timeout or cancellation returns false.
// The sample is NOT consumed.
template <typename T>
bool SampleQueue<T>::WaitUntil(int timeout)
{
	std::unique_lock<std::mutex> locker(mutex);
	// steady_clock: a relative timeout must not be distorted by wall-clock
	// adjustments (the original used system_clock).
	const auto deadline = std::chrono::steady_clock::now() + std::chrono::milliseconds(timeout);
	queueCondition.wait_until(locker, deadline, [&] { return !queue.empty() || waitCancelled; });
	// After a timeout the queue may still be empty; treat that like a
	// cancellation. The original trailing `else if`/`return true` was dead.
	return !waitCancelled && !queue.empty();
}
template <typename T>
bool SampleQueue<T>::WaitUntil(T& sample, int timeout)
{
// Blocking wait for sample
......
......@@ -78,7 +78,15 @@ HRESULT DeckLinkInputDevice::VideoInputFrameArrived(IDeckLinkVideoInputFrame* vi
ComPtr<IDeckLinkVideoInputFrame> frame = ComPtr<IDeckLinkVideoInputFrame>(videoFrame);
//emit ArrivedFrame(frame);
if (Capture) {
Capture->AddFrame(frame);
auto cur_time = QDateTime::currentMSecsSinceEpoch();
Capture->AddFrame(frame, cur_time);
}
if(audioPacket)
{
//qDebug() << "DeckLinkInputDevice get audio packet--------------" << "\n";
auto cur_time = QDateTime::currentMSecsSinceEpoch();
std::shared_ptr<AudioPacket> audio_ptr = std::make_shared<AudioPacket>(audioPacket, cur_time);
emit PushAudioFrame(audio_ptr);
}
/*std::unique_lock<std::mutex> ulock(mutex);
......@@ -197,6 +205,7 @@ bool DeckLinkInputDevice::Initialize()
if (deckLinkAttributes->GetInt(BMDDeckLinkVideoInputConnections, &SupportedInputConnections) != S_OK)
SupportedInputConnections = 0;
return true;
}
......@@ -215,11 +224,14 @@ bool DeckLinkInputDevice::StartCapture(BMDDisplayMode displayMode, IDeckLinkScre
// Set capture callback
DeckLinkInput->SetCallback(this);
// Set the video input mode
if (DeckLinkInput->EnableVideoInput(displayMode, bmdFormat10BitYUV, videoInputFlags) != S_OK)
return false;
if (DeckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, bmdAudioSampleType16bitInteger, 2) != S_OK)
return false;
// Set the capture
if (DeckLinkInput->StartStreams() != S_OK)
return false;
......
......@@ -10,6 +10,8 @@
/// DeckLinkOpenGLOverlay
///
extern int PrvwFlag;
DeckLinkOpenGLOverlayWidget::DeckLinkOpenGLOverlayWidget(QWidget* parent) : QWidget(parent)
{
Delegate = new DeckLinkPreviewOverlay(this);
......@@ -35,7 +37,7 @@ DeckLinkOpenGLWidget::DeckLinkOpenGLWidget(QWidget* parent) : QOpenGLWidget(pare
GetDeckLinkOpenGLScreenPreviewHelper(DeckLinkScreenPreviewHelper);
Delegate = MakeComPtr<ScreenPreviewCallback>();
connect(Delegate.Get(), &ScreenPreviewCallback::FrameArrived, this, &DeckLinkOpenGLWidget::SetFrame, Qt::QueuedConnection);
if(PrvwFlag) connect(Delegate.Get(), &ScreenPreviewCallback::FrameArrived, this, &DeckLinkOpenGLWidget::SetFrame, Qt::QueuedConnection);
OverlayWidget = new DeckLinkOpenGLOverlayWidget(this);
}
......
......@@ -6,6 +6,8 @@
extern int OutputDeleyTime;
#define OUTPUT_1080 1
DeckLinkOutputDevice::DeckLinkOutputDevice(ComPtr<IDeckLink>& decklink, int videoPrerollSize)
: RefCount(1),
state(PlaybackState::Idle),
......@@ -14,7 +16,8 @@ DeckLinkOutputDevice::DeckLinkOutputDevice(ComPtr<IDeckLink>& decklink, int vide
videoPrerollSize(1),
seenFirstVideoFrame(false),
startPlaybackTime(0),
scheduledFrameCompletedCallback(nullptr)
scheduledFrameCompletedCallback(nullptr),
first_sleep(false)
{
// Check that device has an output interface, this will throw an error if using a capture-only device such as DeckLink Mini Recorder
if (!deckLinkOutput)
......@@ -116,6 +119,12 @@ HRESULT DeckLinkOutputDevice::ScheduledPlaybackHasStopped()
bool DeckLinkOutputDevice::StartPlayback(BMDDisplayMode displayMode, bool enable3D, BMDPixelFormat pixelFormat, bool requireReferenceLocked, IDeckLinkScreenPreviewCallback* screenPreviewCallback)
{
BMDDisplayMode outputDisplayMode;
#if OUTPUT_1080
outputDisplayMode = displayMode;
#else
outputDisplayMode = BMDDisplayMode::bmdModeHD720p50;
#endif
// Pass through RP188 timecode and VANC from input frame. VITC timecode is forwarded with VANC
BMDVideoOutputFlags outputFlags = (BMDVideoOutputFlags)(bmdVideoOutputRP188 | bmdVideoOutputVANC);
ComPtr<IDeckLinkDisplayMode> deckLinkDisplayMode;
......@@ -130,12 +139,12 @@ bool DeckLinkOutputDevice::StartPlayback(BMDDisplayMode displayMode, bool enable
state = PlaybackState::Starting;
}
if((deckLinkOutput->DoesSupportVideoMode(bmdVideoConnectionUnspecified, displayMode, pixelFormat, bmdNoVideoOutputConversion, supportedVideoModeFlags, nullptr, &displayModeSupported) != S_OK) || !displayModeSupported)
if((deckLinkOutput->DoesSupportVideoMode(bmdVideoConnectionUnspecified, outputDisplayMode, pixelFormat, bmdNoVideoOutputConversion, supportedVideoModeFlags, nullptr, &displayModeSupported) != S_OK) || !displayModeSupported)
{
return false;
}
if (deckLinkOutput->GetDisplayMode(displayMode, deckLinkDisplayMode.ReleaseAndGetAddressOf()) != S_OK)
if (deckLinkOutput->GetDisplayMode(outputDisplayMode, deckLinkDisplayMode.ReleaseAndGetAddressOf()) != S_OK)
return false;
if (deckLinkDisplayMode->GetFrameRate(&frameDuration, &frameTimescale) != S_OK)
......@@ -148,7 +157,10 @@ bool DeckLinkOutputDevice::StartPlayback(BMDDisplayMode displayMode, bool enable
if (deckLinkOutput->SetScheduledFrameCompletionCallback(this) != S_OK)
return false;
if (deckLinkOutput->EnableVideoOutput(displayMode, outputFlags) != S_OK)
if (deckLinkOutput->EnableVideoOutput(outputDisplayMode, outputFlags) != S_OK)
return false;
if (deckLinkOutput->EnableAudioOutput(bmdAudioSampleRate48kHz,bmdAudioSampleType16bitInteger,2,bmdAudioOutputStreamTimestamped) != S_OK)
return false;
deckLinkOutput->SetScreenPreviewCallback(screenPreviewCallback);
......@@ -160,9 +172,10 @@ bool DeckLinkOutputDevice::StartPlayback(BMDDisplayMode displayMode, bool enable
}
outputVideoFrameQueue.Reset();
outputAudioFrameQueue.Reset();
scheduleVideoFramesThread = std::thread(&DeckLinkOutputDevice::scheduleVideoFramesFunc, this);
scheduleAudioFramesThread = std::thread(&DeckLinkOutputDevice::scheduleAudioFramesFunc, this);
{
//std::lock_guard<std::mutex> locker(mutex);
state = PlaybackState::Prerolling;
......@@ -193,9 +206,13 @@ void DeckLinkOutputDevice::StopPlayback()
}
outputVideoFrameQueue.CancelWaiters();
outputAudioFrameQueue.CancelWaiters();
if (scheduleVideoFramesThread.joinable())
scheduleVideoFramesThread.join();
if (scheduleAudioFramesThread.joinable())
scheduleAudioFramesThread.join();
}
// In scheduled playback is running, stop video and audio streams immediately
......@@ -211,6 +228,8 @@ void DeckLinkOutputDevice::StopPlayback()
// Disable video
deckLinkOutput->DisableVideoOutput();
// Disable audio
deckLinkOutput->DisableAudioOutput();
// Dereference DeckLinkOutputDevice delegate from callbacks
deckLinkOutput->SetScheduledFrameCompletionCallback(nullptr);
......@@ -264,6 +283,14 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
return state != PlaybackState::Idle;
}
// Placeholder for a dedicated audio output pump. No code visible in this
// class starts a thread on this function (StartPlayback launches only the
// video and audio *schedule* threads); audio is currently written from
// scheduleAudioFramesFunc(). The original body was `while (true) {}` — an
// infinite busy-wait that would pin a CPU core and never return if the
// function were ever invoked — so it now returns immediately until a real
// implementation is added.
void DeckLinkOutputDevice::outputAudioFrameFunc(void)
{
	// TODO(review): implement or remove this stub (also drop the
	// declaration in the header if removed).
}
void DeckLinkOutputDevice::scheduleVideoFramesFunc()
{
while(true)
......@@ -280,8 +307,8 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
qDebug() << "input frame cur time:" << input_time << " now time:" <<now_time << " dever time:"<<dever_time << "\n";
if (dever_time < OutputDeleyTime * 1000)
{
//qDebug() << "sleep ms:" << 5 * 1000 - dever_time << "\n";
Sleep(OutputDeleyTime * 1000 - dever_time);
std::this_thread::sleep_for(std::chrono::milliseconds(OutputDeleyTime * 1000 - dever_time));
}
END_WAIT_TIME_COUNTER
//std::lock_guard<std::mutex> locker(mutex);
......@@ -305,6 +332,7 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
break;
}
//scheduledFramesList.push_back(outputFrame);
//checkEndOfPreroll();
......@@ -316,6 +344,33 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
}
}
void DeckLinkOutputDevice::scheduleAudioFramesFunc(void)
{
while (true)
{
std::shared_ptr<AudioPacket> audio_packet;
if (outputAudioFrameQueue.WaitFor(audio_packet))
{
auto data = audio_packet->audio_data.data();
auto sample = audio_packet->sample;
quint32 sampleFramesWritten;
HRESULT ret = deckLinkOutput->WriteAudioSamplesSync(data,sample,&sampleFramesWritten);
if(ret == S_OK)
{
if (sampleFramesWritten < sample)
{
}
}
else
{
qDebug() << "Unable to schedule output audio frame" << "\n";
break;
}
}
}
}
bool DeckLinkOutputDevice::waitForReferenceSignalToLock()
{
ComPtr<IDeckLinkStatus> deckLinkStatus(IID_IDeckLinkStatus, deckLink);
......@@ -359,8 +414,19 @@ void DeckLinkOutputDevice::AddFrame(std::shared_ptr<Image> image)
return;
if (outputVideoFrameQueue.Size() >= 4)
qDebug() << "DeckLinkOutputDevice::AddFrame::qsize::" << "\t" << outputVideoFrameQueue.Size() << "\n";
qDebug() << "DeckLinkOutputDevice::AddFrame video---qsize:" << "\t" << outputVideoFrameQueue.Size() << "\n";
outputVideoFrameQueue.Push(image);
END_SLOT_TIME_COUNTER
}
void DeckLinkOutputDevice::AddAudioFrame(std::shared_ptr<AudioPacket> audio_packet)
{
START_SLOT_TIME_COUNTER
if (outputAudioFrameQueue.Size() >= 4)
qDebug() << "DeckLinkOutputDevice::AddAudioFrame audio---qsize:" << "\t" << outputAudioFrameQueue.Size() << "\n";
if (audio_packet)
{
outputAudioFrameQueue.Push(audio_packet);
}
END_SLOT_TIME_COUNTER
}
......@@ -252,6 +252,7 @@ void DeckLinkOutputPage::RequestedDeviceGranted(ComPtr<IDeckLink>& device)
// TODO: Connect
connect(BindingInputPage->GetCapture(), &CaptureThread::PushFrame, Process.get(), &ProcessThread::AddFrame);
connect(Process.get(), &ProcessThread::PushFrame, SelectedDevice.Get(), &DeckLinkOutputDevice::AddFrame);
connect(BindingInputPage->GetSelectedDevice().Get(), &DeckLinkInputDevice::PushAudioFrame, SelectedDevice.Get(), &DeckLinkOutputDevice::AddAudioFrame);
connect(BindingInputPage, &DeckLinkInputPage::FormatChanged, this, &DeckLinkOutputPage::FormatChanged);
......
......@@ -12,15 +12,18 @@
int OutputDeleyTime = 5;
int FrameRate = 50;
int PrvwFlag = 0;
qint64 StartTimeStamp = 0;
MomentaMedia::MomentaMedia(QWidget *parent)
: QMainWindow(parent)
{
ReadSettings();
ui.setupUi(this);
StartTimeStamp = QDateTime::currentMSecsSinceEpoch();
setWindowFlags(Qt::Window | Qt::WindowMinimizeButtonHint | Qt::WindowMaximizeButtonHint | Qt::WindowCloseButtonHint);
InputDevicePages[0] = ui.devicePage1;
InputDevicePages[1] = ui.devicePage2;
InputDevicePages[2] = ui.devicePage3;
......@@ -41,7 +44,6 @@ MomentaMedia::MomentaMedia(QWidget *parent)
DeleyTimeEdit->setValidator(intValidator);
connect(DeleyTimeEdit,&QLineEdit::textChanged,this,&MomentaMedia::DeleyTimeTextChanged);
ReadSettings();
QSize previewViewSize = ui.previewContainer->size();
......@@ -503,6 +505,10 @@ void MomentaMedia::ReadSettings()
QSettings settings(ini_path, QSettings::IniFormat);
settings.beginGroup("DELEYTIME");
OutputDeleyTime = settings.value("DELEY_TIME", "").toInt();
if (OutputDeleyTime < 1) OutputDeleyTime = 1;
FrameRate = settings.value("FRAME_RATE", "").toInt();
if (FrameRate < 25) FrameRate = 25;
PrvwFlag = settings.value("PRVW_FLAG", "").toInt();
//qDebug() << "deleyTime=" << deleyTime << endl;
settings.endGroup();
}
\ No newline at end of file
......@@ -7,6 +7,8 @@
#include <atomic>
#include "NDI/NDIOutputThread.h"
extern qint64 StartTimeStamp;
NDIOutputThread::NDIOutputThread(const QString& Name, int w, int h) : NDISenderName(Name), width(w), height(h), isSending(false), Instance(nullptr)
{
}
......@@ -93,7 +95,8 @@ void NDIOutputThread::run()
if(frame->IsValid())
{
frame->Fill(Frame.p_data, Frame.xres * Frame.yres * 4);
//qDebug() << "wgt-----------------------------"<<"ndi(" << NDISenderName << ")timestamp : " << frame->getInputFrameCurTimeStamp() - StartTimeStamp << "\n";
Frame.timestamp = frame->getInputFrameCurTimeStamp();
NDIlib_send_send_video_v2(Instance, &Frame);
}
taskQueue.Pop(frame);
......
......@@ -32,7 +32,7 @@ CaptureThread::~CaptureThread()
}
void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame)
void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame, const qint64& timestamp)
{
int64_t st = QDateTime::currentMSecsSinceEpoch();
int64_t st2 = GetCurrTimeMS();
......@@ -77,7 +77,9 @@ void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame)
tags = tag.c_str();
//PRINT_CURR_TIME(tags);
taskQueue.push(videoFrame);
//taskQueue.push(videoFrame);
videoFrameData video_data = { videoFrame,timestamp };
taskVideoQueue.push(video_data);
tag = "CaptureThread::AddFrame::S1::" + std::to_string(idx);
tags = tag.c_str();
......@@ -106,6 +108,7 @@ void CaptureThread::run()
START_WAIT_TIME_COUNTER
ComPtr<IDeckLinkVideoInputFrame> videoFrame;
qint64 cur_time = 0;
/*if (taskQueue.WaitFor(videoFrame))
{
END_WAIT_TIME_COUNTER
......@@ -120,17 +123,19 @@ void CaptureThread::run()
{
std::unique_lock<std::mutex> ulock(mutex);
while (taskQueue.empty()) {
while (taskVideoQueue.empty()) {
cv.wait(ulock);
}
videoFrame = taskQueue.front();
taskQueue.pop();
auto& video_data = taskVideoQueue.front();
videoFrame = video_data.video_frame;
cur_time = video_data.timestamp;
taskVideoQueue.pop();
}
if (videoFrame.Get() != nullptr)
{
auto cur_time = QDateTime::currentMSecsSinceEpoch();
qDebug() << "input frame cur time:" << cur_time << "\n";
//auto cur_time = QDateTime::currentMSecsSinceEpoch();
//qDebug() << "input frame cur time:" << cur_time << "\n";
std::shared_ptr<Image> image = std::make_shared<Image>(videoFrame,cur_time);
emit PushFrame(image);
}
......
#include "Utils/AudioPacket.h"

// Copies the PCM payload out of a DeckLink audio input packet so the data
// outlives the driver-owned buffer. |timestamp| is the host capture time in
// milliseconds since the epoch.
//
// Fix: the original left `sample` and `size` uninitialized when
// |audioPacket| was null; all members are now value-initialized, and the
// init list follows the declaration order (sample, channel, size,
// frame_time_stamp) so initialization order matches what is written.
AudioPacket::AudioPacket(IDeckLinkAudioInputPacket* audioPacket, qint64& timestamp)
	: sample(0),
	  channel(2),
	  size(0),
	  frame_time_stamp(timestamp)
{
	if (audioPacket)
	{
		sample = audioPacket->GetSampleFrameCount();
		// 16-bit interleaved samples, |channel| channels per frame.
		size = sample * sizeof(int16_t) * channel;
		void* data = nullptr;
		audioPacket->GetBytes(&data);
		if (data && size > 0)
		{
			// QByteArray takes a deep copy of the driver buffer.
			audio_data = QByteArray(static_cast<const char*>(data), size);
		}
	}
}

AudioPacket::~AudioPacket()
{
}
\ No newline at end of file
......@@ -54,6 +54,7 @@ int main(int argc, char *argv[])
qRegisterMetaType<Image>("Image");
qRegisterMetaType<Image>("Image&");
qRegisterMetaType<std::shared_ptr<Image>>("std::shared_ptr<Image>");
qRegisterMetaType<std::shared_ptr<AudioPacket>>("std::shared_ptr<AudioPacket>");
MomentaMedia w;
w.show();
......
This source diff could not be displayed because it is too large. You can view the blob instead.
[DELEYTIME]
DELEY_TIME=10
\ No newline at end of file
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
This source diff could not be displayed because it is too large. You can view the blob instead.
[DELEYTIME]
DELEY_TIME=5
FRAME_RATE=25
PRVW_FLAG=0
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment