Commit 063d74b4 by wangguotao

v2.1.0版本

parent c01e48f0
objct name changed "deviceOutputPage3"
objct name changed "deviceOutputPage4"
available device "DeckLink 8K Pro (1)"
available device "DeckLink 8K Pro (2)"
"2024-05-16 14:18:31.039" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
available device "DeckLink 8K Pro (3)"
"2024-05-16 14:18:31.059" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
available device "DeckLink 8K Pro (4)"
"2024-05-16 14:18:31.097" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:18:31.117" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:18:31.137" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:18:31.157" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:18:31.177" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:18:31.197" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:18:31.217" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:18:31.275" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:18:32.299" decklink input fps 50
"2024-05-16 14:18:33.319" decklink input fps 51
"2024-05-16 14:18:33.385" decklink output fps 51 , qsize 0
"2024-05-16 14:18:34.319" decklink input fps 50
"2024-05-16 14:18:34.405" decklink output fps 51 , qsize 0
"2024-05-16 14:18:35.339" decklink input fps 51
"2024-05-16 14:18:35.405" decklink output fps 50 , qsize 0
"2024-05-16 14:18:36.339" decklink input fps 50
"2024-05-16 14:18:36.404" decklink output fps 50 , qsize 0
IDI_ICON1 ICON DISCARDABLE "4k-n.ico"
IDI_ICON1 ICON DISCARDABLE "figureOut.ico"
......@@ -66,6 +66,7 @@
<AdditionalIncludeDirectories>.\ThirdParty\stb_image\;.\ThirdParty\rabbitmq\include;.\ThirdParty\ffmpeg-master-latest-win64-gpl-shared\include;.\ThirdParty\libyuv\include;.\ThirdParty\OpenCV\include;.\ThirdParty\NewTek\include;.\ThirdParty\BlackmagicDesign\include;.\include;%(AdditionalIncludeDirectories);$(Qt_INCLUDEPATH_)</AdditionalIncludeDirectories>
<ShowIncludes>false</ShowIncludes>
<PreprocessorDefinitions>WIN32;_WINSOCKAPI_;AMQP_STATIC;HAVE_CONFIG_H;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<OpenMPSupport>true</OpenMPSupport>
</ClCompile>
<Link>
<AdditionalDependencies>Processing.NDI.Lib.Advanced.x64.lib;opencv_world460.lib;yuv.lib;jpeg.lib;avformat.lib;avcodec.lib;avutil.lib;swscale.lib;rabbitmq.4.lib;ws2_32.lib;%(AdditionalDependencies);$(Qt_LIBS_)</AdditionalDependencies>
......@@ -133,16 +134,19 @@
<QtMoc Include="include\Threads\ProcessMaskThread.h" />
<QtMoc Include="include\Threads\ReplayThread.h" />
<QtMoc Include="include\Network\UdpServer.h" />
<QtMoc Include="include\Network\UdpSend.h" />
<ClInclude Include="include\Record\Record.h" />
<ClInclude Include="include\Record\RecordStore.h" />
<ClInclude Include="include\Record\RecordThread.h" />
<ClInclude Include="include\Threads\ZoomThread.h" />
<QtMoc Include="include\Threads\ZoomThread.h" />
<ClInclude Include="include\Utils\Algorithm.h" />
<ClInclude Include="include\Utils\AudioConvert.h" />
<ClInclude Include="include\Utils\Base64.h" />
<ClInclude Include="include\Utils\MaskBuffer.h" />
<ClInclude Include="include\Utils\Memory4k.h" />
<ClInclude Include="include\Utils\SafeMap.h" />
<ClInclude Include="include\Utils\SampleDeque.h" />
<ClInclude Include="include\Utils\Settings.h" />
<ClInclude Include="include\Utils\SSEFunction.h" />
<ClInclude Include="include\Utils\yuv4k.h" />
<ClInclude Include="ThirdParty\libyuv\include\libyuv.h" />
......@@ -219,6 +223,7 @@
<ClCompile Include="src\BlackMagicDesign\ProfileCallback.cpp" />
<ClCompile Include="src\BlackMagicDesign\ScreenPreviewCallback.cpp" />
<ClCompile Include="src\main.cpp" />
<ClCompile Include="src\Network\UdpSend.cpp" />
<ClCompile Include="src\Network\UdpServer.cpp" />
<ClCompile Include="src\Record\Record.cpp" />
<ClCompile Include="src\Record\RecordStore.cpp" />
......@@ -243,6 +248,7 @@
<ClCompile Include="src\Utils\LatencyStatistics.cpp" />
<ClCompile Include="src\Utils\MiniDumper.cpp" />
<ClCompile Include="src\Utils\Platform.cpp" />
<ClCompile Include="src\Utils\Settings.cpp" />
<ClCompile Include="src\Utils\VideoScale.cpp" />
<ClCompile Include="ThirdParty\BlackmagicDesign\include\DeckLinkAPI_i.c" />
</ItemGroup>
......@@ -250,6 +256,8 @@
<QtUic Include="Form\TimePlus.ui" />
</ItemGroup>
<ItemGroup>
<Image Include="Form\4K.ico" />
<Image Include="Form\figureOut.ico" />
<Image Include="Form\MomentaMedia.ico" />
</ItemGroup>
<ItemGroup>
......
......@@ -125,6 +125,12 @@
<QtMoc Include="include\Network\UdpServer.h">
<Filter>Header Files\Network</Filter>
</QtMoc>
<QtMoc Include="include\Threads\ZoomThread.h">
<Filter>Header Files\Threads</Filter>
</QtMoc>
<QtMoc Include="include\Network\UdpSend.h">
<Filter>Header Files\Network</Filter>
</QtMoc>
</ItemGroup>
<ItemGroup>
<ClInclude Include="include\stdafx.h">
......@@ -262,9 +268,6 @@
<ClInclude Include="include\Utils\MaskBuffer.h">
<Filter>Header Files\Utils</Filter>
</ClInclude>
<ClInclude Include="include\Threads\ZoomThread.h">
<Filter>Header Files\Threads</Filter>
</ClInclude>
<ClInclude Include="include\Utils\Algorithm.h">
<Filter>Header Files\Utils</Filter>
</ClInclude>
......@@ -289,6 +292,12 @@
<ClInclude Include="include\Utils\yuv4k.h">
<Filter>Header Files\Utils</Filter>
</ClInclude>
<ClInclude Include="include\Utils\SampleDeque.h">
<Filter>Header Files\Utils</Filter>
</ClInclude>
<ClInclude Include="include\Utils\Settings.h">
<Filter>Header Files\Utils</Filter>
</ClInclude>
</ItemGroup>
<ItemGroup>
<QtRcc Include="Form\MomentaMedia.qrc">
......@@ -409,6 +418,12 @@
<ClCompile Include="src\Record\RecordThread.cpp">
<Filter>Source Files\Record</Filter>
</ClCompile>
<ClCompile Include="src\Utils\Settings.cpp">
<Filter>Source Files\Utils</Filter>
</ClCompile>
<ClCompile Include="src\Network\UdpSend.cpp">
<Filter>Source Files\Network</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<QtUic Include="Form\TimePlus.ui">
......@@ -419,6 +434,12 @@
<Image Include="Form\MomentaMedia.ico">
<Filter>Resource Files</Filter>
</Image>
<Image Include="Form\4K.ico">
<Filter>Resource Files</Filter>
</Image>
<Image Include="Form\figureOut.ico">
<Filter>Resource Files</Filter>
</Image>
</ItemGroup>
<ItemGroup>
<Midl Include="ThirdParty\BlackmagicDesign\include\DeckLinkAPI.idl">
......
......@@ -14,11 +14,13 @@
#include "Utils/CustomEvents.h"
#include "Utils/Common.h"
#include "Utils/SampleQueue.h"
#include "Utils/SampleDeque.h"
#include "Utils/ComPtr.h"
#include "Utils/Platform.h"
#include "DeckLinkOutputVideoFrame.h"
#include "Utils/Image.h"
#include "Utils/AudioPacket.h"
#include "Utils/SafeMap.h"
class DeckLinkOutputDevice : public QObject, public IDeckLinkVideoOutputCallback
{
......@@ -57,15 +59,32 @@ public:
ComPtr<IDeckLink> GetDeckLinkInstance(void) const { return deckLink; }
ComPtr<IDeckLinkOutput> getDeckLinkOutput(void) const { return deckLinkOutput; }
void SetDeleyTime(qint32& deleyTime);
void SetSendSdiParams(SendSdiParams params);
public slots:
void AddAudioFrame(std::shared_ptr<AudioPacket> audio_packet);
void AddFrame(std::shared_ptr<Image> image);
void AddVideoFrameMask(std::shared_ptr<VideoFrameWithMask> frame);
void AddZoomFrame(std::shared_ptr<VideoFrameWithMask> frame);
signals:
void SendZoomResult(qint64 tm,bool ret);
private:
void InitResource();
HRESULT SetRP188VitcTimecodeOnFrame(IDeckLinkVideoFrame* videoFrame, uint8_t hours, uint8_t minutes, uint8_t seconds, uint8_t frames);
void ConvertFrameCountToTimecode(uint64_t frameCount, uint8_t* hours, uint8_t* minutes, uint8_t* seconds, uint8_t* frames);
void BGRAToUYVY(const std::shared_ptr<VideoFrameWithMask>& image);
// Private methods
void scheduleVideoFramesFunc(void);
void scheduleVideoFramesNoDeleyFunc(void);
void scheduleVideoFramesRealTimeFunc(void);
void scheduleVideoFramesWaitFunc(void);
void scheduleAudioFramesFunc(void);
void testWirteFileFrameFunc(void);
void scheduleAudioFramesFuncDeley(void);
void outputAudioFrameFunc(void);
bool waitForReferenceSignalToLock(void);
void checkEndOfPreroll(void);
private:
std::atomic<ULONG> RefCount;
PlaybackState state;
......@@ -78,6 +97,8 @@ private:
SampleQueue<std::shared_ptr<AudioPacket>> outputAudioFrameQueue;
SampleQueue<std::shared_ptr<Image>> writeVideoFrameQueue;
SampleQueue<std::shared_ptr<VideoFrameWithMask>> outputMaskVideoFrameQueue;
SampleDeque<std::shared_ptr<VideoFrameWithMask>> outputMaskVideoFrameDeque;
SampleQueue<std::shared_ptr<VideoFrameWithMask>> zoomVideoFrameQueue;
//ScheduledFramesList scheduledFramesList;
//
uint32_t videoPrerollSize;
......@@ -98,16 +119,6 @@ private:
//
ScheduledFrameCompletedCallback scheduledFrameCompletedCallback;
// Private methods
void scheduleVideoFramesFunc(void);
void scheduleVideoFramesNoDeleyFunc(void);
void scheduleAudioFramesFunc(void);
void testWirteFileFrameFunc(void);
void scheduleAudioFramesFuncDeley(void);
void outputAudioFrameFunc(void);
bool waitForReferenceSignalToLock(void);
void checkEndOfPreroll(void);
//
int64_t current_video_time = 0;
int64_t current_sleep_ms = 0;
......@@ -134,4 +145,13 @@ private:
uint64_t last_tm{ 0 };
std::map<uint64_t, std::shared_ptr<VideoFrameWithMask>> sort_map_;
//std::atomic<SendSDIStatus> send_sdi_status{ SSS_IDEL };
std::map<qint64, std::map<qint64, std::shared_ptr<VideoFrameWithMask>>*> masked_map;
std::map<qint64, SendSdiParams> masked_status_map;
SafeMap<qint64, std::shared_ptr<SampleQueue<std::shared_ptr<VideoFrameWithMask>>>> zoom_map;
qint64 send_sdi_start_time{0};
std::mutex sdi_clear_mutex;
std::atomic_bool send_zoom_frame_flag{ false };
SafeMap<qint64, std::shared_ptr<VideoFrameWithMask>> output_video_frame_map;
};
......@@ -15,7 +15,9 @@
#include "Threads/ConsumerMqThread.h"
class DeckLinkOutputPage : public QWidget,public DeckLinkInputPage::Listener
class DeckLinkOutputPage : public QWidget,
public DeckLinkInputPage::Listener,
public ConsumerMqThread::Listener
{
Q_OBJECT
......@@ -46,10 +48,14 @@ public:
Process->SetBlackBottomHeight(BlackBottomHeight);
InitControlValue(index + 1);
ProcessMask = process_mask;
ProcessMask->SetRecordStore(BindingInputPage->GetRecordStore());
if (ProcessMask) {
ProcessMask->SetRecordStore(BindingInputPage->GetRecordStore());
if (Zoom) ProcessMask->SetZoomThread(Zoom);
}
}
void OnRecvMsg(QByteArray data) override;
void OnRecvMqMsg(const QJsonDocument& document) override;
public slots:
void OutputDeviceChanged(int selectedDeviceIndex);
......@@ -69,7 +75,8 @@ public slots:
void RequestDeckLink(ComPtr<IDeckLink>& device);
void RequestDeckLinkIfAvailable(ComPtr<IDeckLink>& device);
void RelinquishDeckLink(ComPtr<IDeckLink>& device);
void PushMask(std::shared_ptr<MaskBuffer> buffer);
void PushAttribute(std::shared_ptr<SportAttribute> attr);
private:
void RestartOutput(void);
void SelectedDeviceChanged(void);
......@@ -80,6 +87,7 @@ private:
DeckLinkInputPage* BindingInputPage;
std::shared_ptr<ProcessThread> Process;
std::shared_ptr<ProcessMaskThread> ProcessMask;
std::shared_ptr<ZoomThread> Zoom;
qint32 Index;
std::unique_ptr<NDIOutputThread> NDIOutput;
......
......@@ -91,4 +91,6 @@ private:
uint8_t* uyvy_hd_buffer;
uint8_t* bgra_4k_buffer;
bool scale_to_hd;
qint64 normal_last_timestamp{0};
uint64_t last_recv_ts{ 0 };
};
\ No newline at end of file
#pragma once
#include <QThread>
#include <QUdpSocket>
#include <QHostAddress>
#include "Utils/SampleQueue.h"
// Singleton worker thread for sending UDP messages.
// SendUdpMsg() enqueues a packet into msg_queue; run() (implemented in the
// .cpp) presumably drains the queue and transmits via sendUdpSocket — confirm there.
class UdpSend : public QThread
{
Q_OBJECT
public:
// Returns the single process-wide instance (constructor is private).
static UdpSend& GetInstance();
// Queues one message for delivery to serverAddr:serverPort.
// NOTE(review): serverAddr/serverPort are taken by const value, not const
// reference — likely unintended; confirm before changing the signature.
void SendUdpMsg(const QString& msg, const QString serverAddr, const QString serverPort);
protected:
// One queued datagram: payload plus its destination address and port (as text).
struct UdpPacket
{
QString msg;
QString serverAddr;
QString serverPort;
};
private:
UdpSend();
~UdpSend();
// Non-copyable: forbid external copy construction (singleton).
UdpSend(const UdpSend& single) = delete;
// Non-assignable: forbid external assignment (singleton).
const UdpSend& operator=(const UdpSend& single) = delete;
// QThread entry point; implementation not visible in this header.
void run() override;
private:
QUdpSocket* sendUdpSocket{NULL}; // socket used for sending; lifetime managed in the .cpp
SampleQueue<UdpPacket> msg_queue; // thread-safe queue feeding run()
};
\ No newline at end of file
#pragma once
#include <QObject>
#include <QUdpSocket>
#include <QHostAddress>
class UdpServer : public QObject
{
......@@ -10,10 +11,15 @@ public:
~UdpServer();
public:
void SetUpUDP(const QString hostAddr, const QString hostPort);
public slots:
void ReadDatagrams();
void HandleError(QAbstractSocket::SocketError err);
signals:
void SendMsg(QByteArray data);
private:
void SendUdpMsg(const QString& msg, const QString serverAddr, const QString serverPort);
private:
QUdpSocket* udpSocket{NULL};
QUdpSocket* sendUdpSocket{NULL};
};
\ No newline at end of file
......@@ -28,6 +28,7 @@ public:
public slots:
void AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame,const qint64& timestamp, const HDRMetadata& meta);
void recvReplay(bool& flag);
void RecvReplayStatus(const ReplayStatus& status);
signals:
//void PushFrame(std::shared_ptr<Image> image);
void PushFrame(std::shared_ptr<videoFrameData> frameData);
......@@ -57,6 +58,7 @@ private:
uint64_t m_lastRecvTS;
ComPtr<IDeckLinkVideoConversion> deckVideoConversion;
std::atomic_bool replay_flag{ false };
std::atomic<ReplayStatus> replay_status{RS_IDEL};
uint8_t* bgra_4k_data{ NULL };
std::shared_ptr<NDIOutputThread> NDIOutput{nullptr};
std::shared_ptr<RecordStore> RecordStorePtr{ nullptr };
......
......@@ -21,12 +21,18 @@ class ConsumerMqThread : public QThread
{
Q_OBJECT
public:
class Listener
{
public:
virtual void OnRecvMqMsg(const QJsonDocument& document) = 0;
};
public:
ConsumerMqThread(const std::string& queue_name, const std::string& exchange_name,
const std::string& ip, const std::string& user_id, const std::string& pwd);
const std::string& ip, const std::string& user_id, const std::string& pwd, Listener* listerner);
~ConsumerMqThread();
signals:
void PushMask(std::shared_ptr<MaskBuffer> buffer);
//signals:
// void PushMask(std::shared_ptr<MaskBuffer> buffer);
private:
void run() override;
bool setup_connection_and_channel();
......@@ -43,4 +49,6 @@ private:
uint32_t channel_id;
amqp_connection_state_t mq_connection;
Listener* mq_listener{ NULL };
};
\ No newline at end of file
......@@ -23,6 +23,7 @@ signals:
public:
void ClearQueue();
void SetRecordStore(RecordStore* store) { p_store = store; }
void SetZoomThread(std::shared_ptr<ZoomThread> thread) { zoom_thread = thread; }
void StartRecord(const uint64_t& start_time, const uint64_t& end_time);
protected:
......@@ -44,6 +45,7 @@ private:
void outputAlpha2(std::shared_ptr<MaskedBuffer> current, std::shared_ptr<MaskedBuffer> last,Rect& cross/*,uint8_t** cross_alpha*/);
void outputAlphaRect(std::shared_ptr<MaskedBuffer> current,const Rect& in_cross, Rect& out_cross);
void StartRecord_(const uint32_t& w,const uint32_t& h, const int32_t& fmt,const uint64_t& start_time,const uint64_t& end_time,const std::string& path);
void CropScale();
private:
//SampleQueue<MaskBuffer> mask_queue;
......@@ -52,6 +54,7 @@ private:
std::map<qint64, std::shared_ptr<MaskedBuffer>> tga_masked_map;
std::map<qint64, std::shared_ptr<MaskedBuffer>> store_masked_map;
SampleQueue<std::shared_ptr<videoFrameData>> taskImageQueue;
SampleQueue<std::shared_ptr<VideoFrameWithMask>> crop_queue;
bool once_save{ true };
uint8_t* tmp_bgra{NULL};
......@@ -76,4 +79,10 @@ private:
MaskStatus mask_status{MS_IDEL};
std::mutex mutex;
qint64 start_time{0};
uint64_t last_recv_ts{0};
std::thread crop_thread;
CropMessage crop_msg;
uint8_t* crop_data{NULL};
};
\ No newline at end of file
......@@ -21,167 +21,32 @@ const QString MODE_ACK = "checked_ok";
#define CROP720WIDTH 720
#define CROPHEIGHT 1080
extern int AspecNum;
extern int AspecDen;
//extern int AspecNum;
//extern int AspecDen;
class RoiMessage
{
public:
//#if USE_1080P
// RoiMessage() : w(CROP1080WIDTH), h(CROPHEIGHT)
// {
// x = 1920 / 2 - w / 2;
// y = 1080 / 2 - h / 2;
// timecode = 0;
// }
//#elif USE_4K_16_9
// RoiMessage() : w(CROP1080WIDTH2), h(CROPHEIGHT)
// {
// x = 1920 / 2 - w / 2;
// y = 1080 / 2 - h / 2;
// timecode = 0;
// }
//#else
// RoiMessage() : w(CROP720WIDTH), h(CROPHEIGHT)
// {
// x = 1920 / 2 - w / 2;
// y = 1080 / 2 - h / 2;
// timecode = 0;
// }
//#endif
RoiMessage() : h(CROPHEIGHT)
{
w = h * AspecDen / AspecNum;
w += w % 2;
x = 1920 / 2 - w / 2;
y = 1080 / 2 - h / 2;
centerX = 1920 / 2;
centerY = 0;
timecode = 0;
}
RoiMessage(QByteArray& data)
{
QJsonDocument document = QJsonDocument::fromJson(data);
QJsonObject object;
if (document.isObject())
{
object = document.object();
mode = object.value("signal").toString();
QJsonArray roi = object.value("roi").toArray();
int minx = roi[0].toInt();
int miny = roi[1].toInt();
int maxx = roi[2].toInt();
int maxy = roi[3].toInt();
id = object.value("id").toInt();
centerX = object.value("center_x").toInt();
centerY = object.value("center_y").toInt();
width = object.value("width").toInt();
height = object.value("height").toInt();
timecode = QString::number(object.value("timecode").toDouble(), 'f', 0).toLongLong();
h = CROPHEIGHT;
w = h * AspecDen / AspecNum;
x = minx;
y = miny;
}
}
RoiMessage(QByteArray&& data)
{
QJsonDocument document = QJsonDocument::fromJson(data);
QJsonObject object;
if (document.isObject())
{
object = document.object();
mode = object.value("signal").toString();
QJsonArray roi = object.value("roi").toArray();
int minx = roi[0].toInt();
int miny = roi[1].toInt();
int maxx = roi[2].toInt();
int maxy = roi[3].toInt();
id = object.value("id").toInt();
centerX = object.value("center_x").toInt();
centerY = object.value("center_y").toInt();
width = object.value("width").toInt();
height = object.value("height").toInt();
timecode = QString::number(object.value("timecode").toDouble(), 'f', 0).toLongLong();
h = CROPHEIGHT;
w = h * AspecDen / AspecNum;
x = minx;
y = miny;
}
}
RoiMessage(const RoiMessage& other) : x(other.x), y(other.y), w(other.w), h(other.h),
timecode(other.timecode),centerX(other.centerX),centerY(other.centerY)
{
}
RoiMessage(RoiMessage&& other)
{
x = other.x;
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
centerX = other.centerX;
centerY = other.centerY;
}
RoiMessage operator=(const RoiMessage& other)
{
x = other.x;
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
centerX = other.centerX;
centerY = other.centerY;
return *this;
}
RoiMessage operator=(RoiMessage&& other)
{
x = other.x;
y = other.y;
w = other.w;
h = other.h;
timecode = other.timecode;
centerX = other.centerX;
centerY = other.centerY;
return *this;
}
RoiMessage(int X, int Y, int W, int H)
:x(X),
y(Y),
w(W),
h(H),
timecode(0) {}
bool IsValid()
{
return x > 0 && y > 0 && w > 0 && h > 0;
}
void SetX(int x_)
{
this->x = x_;
}
int X() { return x; }
int Y() { return y; }
int CenterX() { return centerX; }
int CenterY() { return centerY; }
int Width() { return w; }
int Height() { return h; }
qint64 Timecode() { return timecode; }
RoiMessage();
RoiMessage(QByteArray& data);
RoiMessage(QByteArray&& data);
RoiMessage(const RoiMessage& other);
RoiMessage(RoiMessage&& other);
RoiMessage operator=(const RoiMessage& other);
RoiMessage operator=(RoiMessage&& other);
RoiMessage(int X, int Y, int W, int H);
bool IsValid();
void SetX(int x_);
int X();
int Y();
int CenterX();
int CenterY();
int Width();
int Height();
qint64 Timecode();
private:
int x;
......
......@@ -6,7 +6,7 @@
#include <queue>
#include <map>
class ReplayThread : public QThread
class ReplayThread : public QThread
{
Q_OBJECT
public:
......@@ -14,10 +14,12 @@ public:
~ReplayThread();
public slots:
void addFrame(std::shared_ptr<videoFrameData> frameData);
void recvReplayParams(ReplayParams& params,bool& flag);
signals:
//void PushFrame(std::shared_ptr<Image> image);
void PushFrame(std::shared_ptr<videoFrameData> frameData);
public:
bool CanReplay(const ReplayParams& params);
void recvReplayParams(const ReplayParams& params);
protected:
void run() override;
private:
......@@ -31,7 +33,9 @@ private:
std::queue<std::shared_ptr<videoFrameData>> replayVideoQueue2;*/
uint32_t max_store_size;
std::atomic_bool replay_flag{ false };
std::atomic<ReplayStatus> replay_status{ RS_IDEL };
uint32_t interval;
ReplayParams last_replay_params;
uint32_t replay_position{0};
uint32_t replay_position{ 0 };
qint64 current_seq{0};
};
\ No newline at end of file
#pragma once
#include <QThread>
#include <mutex>
#include <condition_variable>
#include <map>
#include <vector>
#include "Utils/Common.h"
#include "NDI/NDIOutputThread.h"
#include "Utils/SafeMap.h"
// Result slot for one zoom operation on a video frame; shared between the
// zoom worker and its consumer (see ZoomThread::map_result).
struct ZoomResultInfo
{
ZoomResultInfo() {}
~ZoomResultInfo()
{
// NOTE(review): redundant — the shared_ptr releases its reference
// automatically on destruction; the explicit reset only documents intent.
if (frame) frame.reset();
}
// Set when this frame should be discarded. The atomic member also makes the
// struct non-copyable, so instances are passed by reference/pointer.
std::atomic_bool drop{false};
// Processed output frame, if any.
std::shared_ptr<VideoFrameWithMask> frame {nullptr};
// Timecode/key of the source frame — units not visible here, TODO confirm.
qint64 timestamp{0};
};
// Worker thread that produces zoom-in / pan / zoom-out frame sequences from
// incoming video frames and sport attributes, emitting results via PushFrame.
// Only the declaration is visible here; all method bodies live in the .cpp.
class ZoomThread : public QThread
{
Q_OBJECT
public:
ZoomThread();
~ZoomThread();
// Supplies the source frame the next zoom sequence is built from.
void setVideoFrame(const std::shared_ptr<VideoFrameWithMask>& frame);
// Records the outcome of a zoom operation (see ZoomResultInfo).
void setResult(const ZoomResultInfo& info);
//void setPoints(const uint64_t& timecode,const QString& points);
signals:
// Emitted for each produced output frame.
void PushFrame(std::shared_ptr<VideoFrameWithMask> frame);
public slots:
void addSportAttr(std::shared_ptr<SportAttribute> attr);
void recvResult(qint64 tm, bool ret);
protected:
// Parameters for one crop+scale step of a zoom sequence.
// NOTE(review): scale_w/scale_h/scale_data are declared twice in this struct
// (here and again after the constructors) — this looks like diff/merge
// residue and will not compile as-is; confirm which declarations are current.
struct ScaleFrame
{
uint32_t scale_w;
uint32_t scale_h;
uint8_t* scale_data{NULL};
ScaleFrame(uint8_t* scale_data, uint8_t* crop_data, const uint32_t& scale_w, const uint32_t& scale_h,
const uint32_t& crop_w, const uint32_t& crop_h, const int32_t& offset_x, const int32_t& offset_y,const bool& repeat,const uint32_t& re_cnt):
scale_data(scale_data), crop_data(crop_data), scale_w(scale_w), scale_h(scale_h), crop_w(crop_w), crop_h(crop_h), offset_in_x(offset_x), offset_in_y(offset_y),
repeat(repeat), repeat_cnt(re_cnt)
{}
ScaleFrame(uint8_t* scale_data, uint8_t* crop_data, const uint32_t& scale_w, const uint32_t& scale_h,
const uint32_t& crop_w, const uint32_t& crop_h, const int32_t& offset_x, const int32_t& offset_y) :
scale_data(scale_data), crop_data(crop_data), scale_w(scale_w), scale_h(scale_h), crop_w(crop_w), crop_h(crop_h), offset_in_x(offset_x), offset_in_y(offset_y)
{}
ScaleFrame(uint8_t* scale_data, uint8_t* crop_data, const uint32_t& scale_w, const uint32_t& scale_h,
const uint32_t& crop_w, const uint32_t& crop_h, const int32_t& offset_x, const int32_t& offset_y,bool& reuse_scale) :
scale_data(scale_data), crop_data(crop_data), scale_w(scale_w), scale_h(scale_h), crop_w(crop_w), crop_h(crop_h), offset_in_x(offset_x), offset_in_y(offset_y),reuse_scale(reuse_scale)
{}
ScaleFrame(uint8_t* scale_data, uint8_t* crop_data, const uint32_t& scale_w, const uint32_t& scale_h,
const uint32_t& crop_w, const uint32_t& crop_h, const int32_t& offset_x, const int32_t& offset_y, const float_t& ration_x, const float_t& ration_y) :
scale_data(scale_data), crop_data(crop_data), scale_w(scale_w), scale_h(scale_h), crop_w(crop_w), crop_h(crop_h), offset_in_x(offset_x), offset_in_y(offset_y),
ration_x(ration_x), ration_y(ration_y)
{}
// NOTE(review): this overload has no crop_w/crop_h parameters, yet the
// initializer list writes crop_w(crop_w), crop_h(crop_h) — that initializes
// the members from themselves (indeterminate values). Confirm intent.
ScaleFrame(uint8_t* scale_data, uint8_t* crop_data, const uint32_t& scale_w, const uint32_t& scale_h,
const int32_t& offset_x, const int32_t& offset_y, const float_t& ration_x, const float_t& ration_y) :
scale_data(scale_data), crop_data(crop_data), scale_w(scale_w), scale_h(scale_h), crop_w(crop_w), crop_h(crop_h), offset_in_x(offset_x), offset_in_y(offset_y),
ration_x(ration_x), ration_y(ration_y)
{}
ScaleFrame(uint8_t* scale_data, uint8_t* crop_data, const uint32_t& scale_w, const uint32_t& scale_h,
const uint32_t& crop_w, const uint32_t& crop_h, const int32_t& offset_x, const int32_t& offset_y, const float_t& ration_x, const float_t& ration_y, const bool& repeat, const uint32_t& re_cnt) :
scale_data(scale_data), crop_data(crop_data), scale_w(scale_w), scale_h(scale_h), crop_w(crop_w), crop_h(crop_h), offset_in_x(offset_x), offset_in_y(offset_y),
ration_x(ration_x), ration_y(ration_y), repeat(repeat), repeat_cnt(re_cnt)
{}
bool reuse_scale{false};
bool repeat{ false };
uint8_t* scale_data{ NULL };
uint8_t* crop_data{ NULL };
uint32_t scale_w{ 0 };
uint32_t scale_h{ 0 };
uint32_t crop_w{ 0 };
uint32_t crop_h{ 0 };
int32_t offset_in_x{0};
int32_t offset_in_y{0};
uint32_t repeat_cnt{ 0 };
float_t ration_x{0.0};
float_t ration_y{ 0.0 };
};
// Pan/movement direction.
enum ZoomDirect
{
ZD_IDEL = -1,
ZD_LTOR = 1,// left to right
ZD_RTOL,
ZD_UTOD,// top to bottom
ZD_DTOU,
};
enum ZoomStatus
{
ZS_IDEL = -1,
ZS_START = 1,
ZS_END,
};
// Raw pixel buffer with its dimensions; ownership not expressed here — TODO confirm.
struct FrameBuffer
{
uint8_t* buffer{NULL};
uint32_t width{0};
uint32_t height{0};
};
private:
void run() override;
bool DrawLine(std::vector<cv::Point>& points,uint32_t& min_x,uint32_t& max_x);
void ZoomIn(const int& index);
void ZoomIn(const int& index,const bool& omp,const bool& last_frame);
void CropAndScale(uint8_t* src_data,ScaleFrame& scale_frame, const qint64& seq);
void ScaleCrop(uint8_t* src_data, ScaleFrame& scale_frame, const qint64& seq);
void ScaleCrop(FrameBuffer& src_frame, ScaleFrame& scale_frame, const qint64& seq);
void SendFrameFunc();
private:
// Pixel-format-specific processing pipelines (bodies in the .cpp).
void ProcessWithRGBA();
void ProcessWithRGBANew();
void ProcessWithRGBASec();
void ProcessWithRGBALast();
void ProcessWithUYVY();
private:
// Zoom sequence phases: prepare, zoom in, pan, zoom out.
bool PrepareZoom(std::shared_ptr<VideoFrameWithMask> src_frame);
bool ZoomIn(std::shared_ptr<ZoomResultInfo> p_info);
bool ZoomPan(std::shared_ptr<ZoomResultInfo> p_info);
bool ZoomOut(std::shared_ptr<ZoomResultInfo> p_info);
private:
std::shared_ptr<VideoFrameWithMask> src_frame_{nullptr};
uint8_t* scale_data_{NULL};
uint8_t* i422_data_ {NULL};
uint8_t* i422_4k_data_{NULL};//for move and zoom-out
// NOTE(review): member named `cv` shadows the OpenCV namespace used by the
// cv::Mat / cv::Point members below; consider renaming in a coordinated change.
std::condition_variable cv;
std::mutex mutex;
std::map<qint64, std::vector<Point>> map_points_;
std::map<qint64, std::shared_ptr<SportAttribute>> map_attrs_;
std::shared_ptr<NDIOutputThread> ndi_thread_{nullptr};
std::thread* p_send_thread{NULL};
SampleQueue<std::shared_ptr<videoFrameData>> send_queue_;
cv::Mat bgra_mat;
std::vector<ScaleFrame> vec_scale_frame;
std::vector<ScaleFrame> vec_in_frame;
std::vector<ScaleFrame> vec_move_scale;
std::vector<ScaleFrame> vec_out_scale;
float inc_ratio {0.0};
// NOTE(review): increment_w/increment_h are declared twice (float here,
// uint32_t below) — diff/merge residue; this cannot compile as-is. Confirm
// which pair is current before removing the other.
float increment_w{0.0};
float increment_h{0.0};
uint32_t increment_w;
uint32_t increment_h;
uint32_t scale_w{ 0 };
uint32_t scale_h{ 0 };
uint32_t scale_size{ 0 };
uint32_t src_w{ 0 };
uint32_t src_h{0};
qint64 start_time{0};
HDRMetadata meta;
cv::Point fir_point;
cv::Point end_point;
//std::atomic_bool zoom_end{ false };
int interval_ms{0};
uint32_t wait_cnt{0};
std::vector<cv::Point> sport_points;
ZoomDirect direct{ ZD_IDEL };
std::atomic<ZoomStatus> zoom_status{ ZS_IDEL };
std::map<int, std::shared_ptr<videoFrameData>> zoom_map;
uint32_t zoom_frame_count{0};
SafeMap<qint64, std::shared_ptr<ZoomResultInfo>> map_result;
cv::Point zoom_in_start;
uint32_t zoom_pan_len;
cv::Point zoom_out_start;
uint32_t in_frame_count;
uint32_t pan_frame_count;
uint32_t out_frame_count;
};
\ No newline at end of file
......@@ -85,6 +85,38 @@ struct MaskBuffer
}
}
// Parses a mask message from an already-decoded JSON object.
// Expected fields (inferred from the reads below — confirm against the producer):
//   "signal"          int; -1 means "no mask payload", only `signal` is stored
//   "mask"            base64 string; decoded into mask_data when non-empty
//   "coordinate_xyxy" array [x1, y1, x2, y2] — upper-left then lower-right corner
//   "timecode"        numeric; converted via string round-trip to avoid double precision loss
// An empty object leaves all members at their defaults.
MaskBuffer(const QJsonObject& object)
{
if (!object.isEmpty())
{
//QJsonObject object = document.object();
signal = object["signal"].toInt();
if (signal != -1)
{
QString base_data = object["mask"].toString();
// Decode the mask payload only when present; absent/empty mask leaves mask_data empty.
if (base_data != "") mask_data = Base64::base64_decode(base_data.toStdString());
//mask_data = QByteArray::fromBase64(base_data.toLatin1());
/*auto size = str.length();
auto siez1 = mask_data.size();*/
QJsonArray array = object["coordinate_xyxy"].toArray();
upper_left_point.x = array[0].toInt();
upper_left_point.y = array[1].toInt();
lower_right_point.x = array[2].toInt();
lower_right_point.y = array[3].toInt();
// Dimensions derived from the corner points; no validation that x2 > x1 / y2 > y1.
width = lower_right_point.x - upper_left_point.x;
height = lower_right_point.y - upper_left_point.y;
// 'f',0 formatting truncates the double to an integer string before toLongLong().
timecode = QString::number(object.value("timecode").toDouble(), 'f', 0).toLongLong();
}
/*cv::Mat gray = cv::Mat(lower_right_point.y - upper_left_point.y, lower_right_point.x - upper_left_point.x, CV_8UC1, (char*)str.c_str());
cv::cvtColor(gray, gray, cv::COLOR_GRAY2BGR);
cv::imshow("gray", gray);
cv::waitKey(0);*/
}
}
~MaskBuffer()
{
......
#pragma once
#include <condition_variable>
#include <memory>
#include <mutex>
#include <map>
#include <mutex>
#include <vector>
// Mutex-guarded wrapper around std::map with condition-variable support so
// consumers can block until entries are available (WaitFor).
// Individual operations are atomic, but sequences of calls are not: a key
// observed via Find() may be gone before the next call.
//
// FIX: the original contained a stray `template<typename T> class SafeMap {`
// fragment (apparently a bad merge) in the middle of the class body, which
// orphaned Keys() and made the class ill-formed; it has been removed so
// Keys() is a proper member again.
template <typename K, typename V>
class SafeMap {
public:
    SafeMap() {}
    ~SafeMap() {}

    // Copy-construct from a snapshot of rhs taken under rhs's lock
    // (the original copied without locking rhs).
    SafeMap(const SafeMap& rhs)
    {
        std::unique_lock<std::mutex> lock(rhs.mutex_);
        map_ = rhs.map_;
    }

    // Copy-assign; locks both maps via std::lock to avoid lock-order deadlock.
    SafeMap& operator=(const SafeMap& rhs)
    {
        if (&rhs != this) {
            std::unique_lock<std::mutex> lhs_lock(mutex_, std::defer_lock);
            std::unique_lock<std::mutex> rhs_lock(rhs.mutex_, std::defer_lock);
            std::lock(lhs_lock, rhs_lock);
            map_ = rhs.map_;
        }
        return *this;
    }

    // Insert-or-access under the lock. CAUTION: the returned reference escapes
    // the lock, so concurrent use of the same slot is still the caller's risk.
    V& operator[](const K& key)
    {
        std::unique_lock<std::mutex> lock(mutex_);
        return map_[key];
    }

    // Block until the map holds at least `size` entries; returns true once the
    // threshold is reached. size < 1 behaves like 1 (the original converted a
    // negative int to a huge size_t and waited forever).
    bool WaitFor(int size)
    {
        const std::size_t want = size > 0 ? static_cast<std::size_t>(size) : 1;
        std::unique_lock<std::mutex> lock(mutex_);
        condition_.wait(lock, [&] { return map_.size() >= want; });
        return !map_.empty();
    }

    // Block until the map is non-empty; returns true when an entry exists.
    bool WaitFor()
    {
        std::unique_lock<std::mutex> lock(mutex_);
        condition_.wait(lock, [&] { return !map_.empty(); });
        return !map_.empty();
    }

    // Remove and return the smallest-key entry; false when empty.
    bool Pop(K& key, V& value)
    {
        std::unique_lock<std::mutex> lock(mutex_);
        if (map_.empty()) return false;
        auto it = map_.begin();
        key = it->first;
        value = it->second;
        map_.erase(it); // erase by iterator: no second key lookup
        return true;
    }

    // Peek at the smallest-key entry without removing it; false when empty.
    bool Begin(K& key, V& value)
    {
        std::unique_lock<std::mutex> lock(mutex_);
        if (map_.empty()) return false;
        key = map_.begin()->first;
        value = map_.begin()->second;
        return true;
    }

    // when multithread calling size() return a tmp status, some threads may insert just after size() call
    int Size()
    {
        std::unique_lock<std::mutex> lock(mutex_);
        return static_cast<int>(map_.size());
    }

    // when multithread calling Empty() return a tmp status, some threads may insert just after Empty() call
    bool IsEmpty()
    {
        std::unique_lock<std::mutex> lock(mutex_);
        return map_.empty();
    }

    // Insert; returns false (and keeps the old value) when the key already exists.
    // Waiters are notified either way.
    bool Insert(const K& key, const V& value)
    {
        std::unique_lock<std::mutex> lock(mutex_);
        auto ret = map_.insert(std::pair<K, V>(key, value));
        condition_.notify_all();
        return ret.second;
    }

    // Insert, replacing any existing entry for `key`.
    void EnsureInsert(const K& key, const V& value)
    {
        std::unique_lock<std::mutex> lock(mutex_);
        auto ret = map_.insert(std::pair<K, V>(key, value));
        if (!ret.second) {
            // key already present: drop the old entry and store the new value
            map_.erase(ret.first);
            map_.insert(std::pair<K, V>(key, value));
        }
        condition_.notify_all();
    }

    // Copy the value for `key` into `value`; false when absent.
    bool Find(const K& key, V& value)
    {
        std::unique_lock<std::mutex> lock(mutex_);
        auto iter = map_.find(key);
        if (iter == map_.end()) return false;
        value = iter->second;
        return true;
    }

    // Membership test only.
    bool Find(const K& key)
    {
        std::unique_lock<std::mutex> lock(mutex_);
        return map_.find(key) != map_.end();
    }

    // Atomically fetch the old value and replace it with newValue; false when absent.
    bool FindOldAndSetNew(const K& key, V& oldValue, const V& newValue)
    {
        std::unique_lock<std::mutex> lock(mutex_);
        auto iter = map_.find(key);
        if (iter == map_.end()) return false;
        oldValue = iter->second;
        map_.erase(iter);
        map_.insert(std::pair<K, V>(key, newValue));
        return true;
    }

    // Remove `key` if present; no-op otherwise.
    void Erase(const K& key)
    {
        std::unique_lock<std::mutex> lock(mutex_);
        map_.erase(key);
    }

    // Discard all entries. Does not wake waiters (nothing became available).
    void Clear()
    {
        std::unique_lock<std::mutex> lock(mutex_);
        map_.clear();
    }

    // Snapshot of all keys in ascending order.
    std::vector<K> Keys() const {
        std::unique_lock<std::mutex> lock(mutex_);
        std::vector<K> result;
        result.reserve(map_.size());
        for (auto& pair : map_) {
            result.push_back(pair.first);
        }
        return result;
    }

private:
    mutable std::mutex mutex_; // mutable so const methods (Keys) can lock
    std::map<K, V> map_;
    std::condition_variable condition_;
};
\ No newline at end of file
#pragma once
// Standard library
#include <chrono>
#include <condition_variable>
#include <deque>
#include <memory>
#include <mutex>
#include <queue>
#include <utility>
#include <vector>

// Qt
#include <QDebug>
#include <qdatetime.h>

// Project
#include "Common.h"
template<typename T>
class SampleDeque
{
public:
SampleDeque();
virtual ~SampleDeque();
void Push(const T& sample);
void Push(T&& sample);
void PushBack(const T& sample);
bool Pop(T& sample);
bool Pop();
bool PopBack(T& sample);
bool PopBack();
bool Front(T& sample);
bool WaitFor(T& sample);
bool WaitFor(int size);
bool WaitUntil(T& sample, int timeout);
bool WaitUntil(int timeout);
void CancelWaiters(void);
void Reset(void);
bool Empty();
long Size();
bool Put(int size, std::vector<T>& vec);
private:
std::deque<T> deque;
std::condition_variable dequeCondition;
std::mutex mutex;
bool waitCancelled;
};
template<typename T>
SampleDeque<T>::SampleDeque():waitCancelled(false)
{
}
template<typename T>
SampleDeque<T>::~SampleDeque()
{
CancelWaiters();
}
// Copy `sample` onto the front (newest) end, then wake all waiters.
template<typename T>
void SampleDeque<T>::Push(const T& sample)
{
    {
        std::lock_guard<std::mutex> guard(mutex);
        deque.push_front(sample);
    }
    // Notify outside the lock so woken threads can acquire it immediately.
    dequeCondition.notify_all();
}
// Copy `sample` onto the back (oldest) end, then wake all waiters.
template<typename T>
void SampleDeque<T>::PushBack(const T& sample)
{
    {
        std::lock_guard<std::mutex> guard(mutex);
        deque.push_back(sample);
    }
    // Notify outside the lock so woken threads can acquire it immediately.
    dequeCondition.notify_all();
}
// Move `sample` onto the front (newest) end, then wake all waiters.
template<typename T>
void SampleDeque<T>::Push(T&& sample)
{
	{
		std::lock_guard<std::mutex> locker(mutex);
		// BUG FIX: this rvalue overload previously copied (`push_front(sample)`),
		// silently defeating the move optimisation callers asked for.
		deque.push_front(std::move(sample));
	}
	dequeCondition.notify_all();
}
// Move the newest (front) sample into `sample`; false when empty.
template<typename T>
bool SampleDeque<T>::Pop(T& sample)
{
    std::lock_guard<std::mutex> guard(mutex);
    if (!deque.empty())
    {
        sample = std::move(deque.front());
        deque.pop_front();
        return true;
    }
    return false;
}
// Discard the newest (front) sample; false when empty.
template<typename T>
bool SampleDeque<T>::Pop()
{
    std::lock_guard<std::mutex> guard(mutex);
    if (!deque.empty())
    {
        deque.pop_front();
        return true;
    }
    return false;
}
// Move the oldest (back) sample into `sample`; false when empty.
template<typename T>
bool SampleDeque<T>::PopBack(T& sample)
{
    std::lock_guard<std::mutex> guard(mutex);
    if (!deque.empty())
    {
        sample = std::move(deque.back());
        deque.pop_back();
        return true;
    }
    return false;
}
// Discard the oldest (back) sample; false when empty.
template<typename T>
bool SampleDeque<T>::PopBack()
{
    std::lock_guard<std::mutex> guard(mutex);
    if (!deque.empty())
    {
        deque.pop_back();
        return true;
    }
    return false;
}
// Peek at the newest (front) sample without removing it; false when empty.
template<typename T>
bool SampleDeque<T>::Front(T& sample)
{
    std::lock_guard<std::mutex> guard(mutex);
    if (!deque.empty())
    {
        sample = deque.front();
        return true;
    }
    return false;
}
// Block until a sample arrives or CancelWaiters() fires; on success the
// newest sample is moved into `sample`. Returns false only when cancelled.
template<typename T>
bool SampleDeque<T>::WaitFor(T& sample)
{
    std::unique_lock<std::mutex> locker(mutex);
    dequeCondition.wait(locker, [this] { return waitCancelled || !deque.empty(); });
    if (waitCancelled)
        return false;
    if (!deque.empty())
    {
        sample = std::move(deque.front());
        deque.pop_front();
    }
    return true;
}
// Block until at least `size` samples are queued or waiters are cancelled.
// Returns true only when the threshold was reached.
template<typename T>
bool SampleDeque<T>::WaitFor(int size)
{
	std::unique_lock<std::mutex> locker(mutex);
	// BUG FIX: predicate and the check below referenced the misspelled
	// identifier `deuqe`, so this overload failed to compile when
	// instantiated. The cast also silences the signed/unsigned comparison.
	dequeCondition.wait(locker, [&] {return (!deque.empty() && deque.size() >= static_cast<size_t>(size)) || waitCancelled; });
	if (waitCancelled)
		return false;
	else if (!deque.empty())
	{
		return true;
	}
	return false;
}
// Block for at most `timeout` milliseconds for a sample; on success the
// newest sample is moved into `sample`. Returns false on cancellation or
// when the deadline passes with the deque still empty.
template<typename T>
bool SampleDeque<T>::WaitUntil(T& sample, int timeout)
{
	std::unique_lock<std::mutex> locker(mutex);
	auto delay = std::chrono::system_clock::now() + std::chrono::milliseconds(timeout);
	// BUG FIX: the predicate referenced the misspelled identifier `deuqe`,
	// so this overload failed to compile when instantiated.
	dequeCondition.wait_until(locker, delay, [&] {return !deque.empty() || waitCancelled; });
	if (waitCancelled || deque.empty())
		return false;
	sample = std::move(deque.front());
	deque.pop_front();
	return true;
}
// Block for at most `timeout` milliseconds until a sample is available.
// Returns false on cancellation or when the deadline passes while empty.
template<typename T>
bool SampleDeque<T>::WaitUntil(int timeout)
{
    std::unique_lock<std::mutex> locker(mutex);
    const auto deadline = std::chrono::system_clock::now() + std::chrono::milliseconds(timeout);
    dequeCondition.wait_until(locker, deadline, [this] { return waitCancelled || !deque.empty(); });
    // Equivalent to the original's branch chain (its trailing `return true`
    // was unreachable): success iff not cancelled and not empty.
    return !(waitCancelled || deque.empty());
}
// Abort every blocked WaitFor/WaitUntil call; they will return false.
template<typename T>
void SampleDeque<T>::CancelWaiters(void)
{
    {
        std::lock_guard<std::mutex> guard(mutex);
        waitCancelled = true; // observed by every wait predicate
    }
    dequeCondition.notify_all();
}
// Drop all stored samples and re-arm waiting after a CancelWaiters().
template<typename T>
void SampleDeque<T>::Reset(void)
{
    std::lock_guard<std::mutex> guard(mutex);
    waitCancelled = false;
    deque.clear();
}
// True when no samples are queued.
template<typename T>
bool SampleDeque<T>::Empty()
{
    std::lock_guard<std::mutex> guard(mutex);
    return deque.empty();
}
// Current number of queued samples (narrowed from size_t to long, as the
// declared interface requires).
template<typename T>
long SampleDeque<T>::Size()
{
    std::lock_guard<std::mutex> guard(mutex);
    return static_cast<long>(deque.size());
}
// Copy up to `size` samples, oldest (back) first, into `vec`.
// Returns true when exactly `size` samples were copied, false when the
// deque was empty or held fewer than `size` (vec then holds the partial fill).
template <typename T>
bool SampleDeque<T>::Put(int size, std::vector<T>& vec)
{
	std::lock_guard<std::mutex> locker(mutex);
	if (deque.empty())
		return false;
	int count = 0;
	for (auto itor = deque.rbegin(); itor != deque.rend(); itor++)
	{
		vec.push_back(*itor);
		count++;
		if (count == size) return true;
	}
	// BUG FIX: the original fell off the end of a non-void function here
	// (undefined behavior) whenever fewer than `size` samples were stored.
	return false;
}
\ No newline at end of file
......@@ -175,7 +175,7 @@ template <typename T>
bool SampleQueue<T>::WaitFor(int size)
{
std::unique_lock<std::mutex> locker(mutex);
queueCondition.wait(locker, [&] {return (!queue.empty() && queue.size()>= size) || waitCancelled; });
queueCondition.wait(locker, [&] {return (!queue.empty() && queue.size()> size) || waitCancelled; });
if (waitCancelled)
return false;
......@@ -183,7 +183,7 @@ bool SampleQueue<T>::WaitFor(int size)
{
return true;
}
return true;
return false;
}
template <typename T>
......@@ -253,4 +253,5 @@ bool SampleQueue<T>::Empty()
{
std::lock_guard<std::mutex> locker(mutex);
return queue.empty();
}
\ No newline at end of file
}
#pragma once
#include <iostream>
#include <QString>
// Process-wide configuration knobs. Everything is a mutable static, so these
// are effectively globals; values are read throughout the capture/output/replay
// threads (presumably populated once at startup -- TODO confirm the loader).
class Settings
{
public:
	static int32_t ReplayStoreTime ;// unit: ms
	static int32_t FrontDeleyTime ;// unit: ms
	static int32_t FrameRate;              // frames per second (used e.g. as NDI frame_rate_N and encoder fps)
	static int32_t OutputPlayMode;
	static int32_t AudioChannel;           // channel count passed to EnableAudioInput / NDI audio frames
	static int32_t ScaleMode ;
	static int32_t AspecNum ;              // aspect-ratio numerator (see RoiMessage width derivation)
	static int32_t AspecDen ;              // aspect-ratio denominator
	static int32_t OneFrameDuration ; //FOR NDI
	static int32_t SDIOneFrameDuration; //FOR SDI
	static int32_t RecordStoreDuration ;   // NOTE(review): combined with FrameRate elsewhere -- looks like ms, confirm
	static int32_t RecordFlag;             // non-zero enables feeding frames to the record store
	static int32_t OpenOMP ;               // non-zero selects the OpenMP colour-conversion path
	static int32_t TimeoutFrames ;
	static int32_t SdiOutWaitNums;
	static bool HaveBlackDataFlag;
	static int32_t DrawFlag;
	static int32_t ZoomFlag;               // non-zero enables creating the ZoomThread
	static float ZoomScale;
	static int32_t ZoomScaleN;
	static int32_t ZoomScaleD;
	static int32_t ZoomInDuration;
	static int32_t ZoomOutDuration;
	static int32_t ZoomMoveDuration;
	static int32_t ZoomUseOmp;
	static int32_t ZoomScaleType; //1.libyuv 2.opencv
	static int32_t ZoomScaleFilter;//libyuv:0-3 opencv:0-4
	static uint32_t ZoomInWaitCnt;
	static uint32_t ZoomMoveWaitCnt;
	static uint32_t ZoomOutWaitCnt;
	static uint32_t ReplayForward;
	static uint32_t ReplayDeley;
	static uint32_t SDIOutputWaitNums;     // queue-depth threshold used by the NDI output wait
	static int32_t CropFlag;
	static int32_t CropX;
	static int32_t CropY;
	static int32_t CropDirection;
	static int32_t UIUdpPort;
	static QString UIIpAddr;
};
......@@ -5,10 +5,11 @@
#include "Utils/Platform.h"
#include "BlackMagicDesign/DeckLinkInputDevice.h"
#include "Utils/Common.h"
#include "Utils/Settings.h"
#include "BlackMagicDesign/DeckLinkInputPage.h"
#define NEED_AUDIO 0
extern int AudioChannel;
//extern int AudioChannel;
DeckLinkInputDevice::DeckLinkInputDevice(QObject* parent, ComPtr<IDeckLink>& device, int index)
: RefCount(1),
......@@ -89,6 +90,7 @@ HRESULT DeckLinkInputDevice::VideoInputFrameArrived(IDeckLinkVideoInputFrame* vi
ComPtr<IDeckLinkVideoInputFrame> frame = ComPtr<IDeckLinkVideoInputFrame>(videoFrame);
//emit ArrivedFrame(frame);
if (videoFrame && Capture) {
//qDebug() << GetCurrDateTimeStr() << " VideoInputFrameArrived"<< "\n";
//auto fmt = videoFrame->GetPixelFormat();
unsigned flags = videoFrame->GetFlags();
bool noInputSourceFlag = false;
......@@ -151,7 +153,7 @@ HRESULT DeckLinkInputDevice::VideoInputFrameArrived(IDeckLinkVideoInputFrame* vi
if (audioPacket)
{
auto cur_time = QDateTime::currentMSecsSinceEpoch();
std::shared_ptr<AudioPacket> audio_ptr = std::make_shared<AudioPacket>(audioPacket, cur_time, AudioChannel);
std::shared_ptr<AudioPacket> audio_ptr = std::make_shared<AudioPacket>(audioPacket, cur_time, Settings::AudioChannel);
emit PushAudioFrame(audio_ptr);
}
#endif // NEED_AUDIO
......@@ -298,7 +300,7 @@ bool DeckLinkInputDevice::StartCapture(BMDDisplayMode displayMode, IDeckLinkScre
if (DeckLinkInput->EnableVideoInput(bmdMode4K2160p50, bmdFormat8BitYUV, bmdVideoInputFlagDefault) != S_OK)
return false;
if (DeckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, bmdAudioSampleType32bitInteger, AudioChannel) != S_OK)
if (DeckLinkInput->EnableAudioInput(bmdAudioSampleRate48kHz, bmdAudioSampleType32bitInteger, Settings::AudioChannel) != S_OK)
return false;
// Set the capture
......
......@@ -279,35 +279,27 @@ void DeckLinkInputPage::RecvMsg(QByteArray json)
auto status = (ReplayStatus)object["status"].toInt();
auto start_time = object["start_time"].toInt();
auto end_time = object["end_time"].toInt();
if (end_time <= start_time) return;
ReplayParams params{ status, start_time ,end_time };
//if (end_time <= start_time || !Replay || !Replay->CanReplay(params)) return;
//qint64 timecode = QString::number(obj.value("timecode").toDouble(), 'f', 0).toLongLong();
switch (status)
{
case RS_START:
case RS_RE_START:
{
bool status = true;
if (Capture) Capture->recvReplay(status);
if (Replay) {
ReplayParams params{ start_time ,end_time };
Replay->recvReplayParams(params,status);
if (!Replay || !Replay->CanReplay(params))
{
if (Capture) Capture->RecvReplayStatus(RS_END);
break;
}
break;
}
case RS_END:
default:
{
bool status = false;
if (Capture) Capture->recvReplay(status);
if (Replay) {
ReplayParams params{ start_time ,end_time };
Replay->recvReplayParams(params, status);
}
if (Capture) Capture->RecvReplayStatus(status);
Replay->recvReplayParams(params);
break;
}
default:
break;
}
}
......@@ -430,8 +422,8 @@ void DeckLinkInputPage::ObjectNameChanged(const QString& newName)
//SelectedDevice.Get()->SetNDIOutputThread(NDIOutput);
//connect(Capture.get(), SIGNAL(PushFrame(std::shared_ptr<videoFrameData>)), NDIOutput.get(), SLOT(AddFrame(std::shared_ptr<videoFrameData>)));
connect(Replay.get(), SIGNAL(PushFrame(std::shared_ptr<videoFrameData>)), NDIOutput.get(), SLOT(AddFrame(std::shared_ptr<videoFrameData>)));
connect(Capture.get(), SIGNAL(PushFrameToReplay(std::shared_ptr<videoFrameData>)), Replay.get(), SLOT(addFrame(std::shared_ptr<videoFrameData>)));
connect(Replay.get(), SIGNAL(PushFrame(std::shared_ptr<videoFrameData>)), NDIOutput.get(), SLOT(AddFrame(std::shared_ptr<videoFrameData>)), Qt::DirectConnection);
connect(Capture.get(), SIGNAL(PushFrameToReplay(std::shared_ptr<videoFrameData>)), Replay.get(), SLOT(addFrame(std::shared_ptr<videoFrameData>))/*, Qt::DirectConnection*/);
connect(udpServer.get(), SIGNAL(SendMsg(QByteArray)),this, SLOT(RecvMsg(QByteArray)));
//NDIOutput->moveToThread(Capture.get());
//connect(SelectedDevice.Get(), SIGNAL(ArrivedFrame(ComPtr<IDeckLinkVideoInputFrame>)), NDIOutput.get(), SLOT(AddFrame(ComPtr<IDeckLinkVideoInputFrame>)));
......
......@@ -7,7 +7,9 @@
#include "TimePlus.h"
#include "BlackMagicDesign/DeckLinkOutputPage.h"
#include "Utils/Platform.h"
#include "Utils/Settings.h"
//extern int ZoomFlag;
namespace
{
......@@ -100,6 +102,18 @@ DeckLinkOutputPage::DeckLinkOutputPage() : SelectedDevice(nullptr), Process(null
BlackBottomHeight = BlackHeightEdit->text().toInt();
PreviewView->GetOverlay()->SetDeleyTime(DeleyTime);
if (Settings::ZoomFlag)
{
if (!Zoom)
{
Zoom = std::make_shared<ZoomThread>();
Zoom->start();
if (Zoom)
{
connect(this, &DeckLinkOutputPage::PushAttribute, Zoom.get(), &ZoomThread::addSportAttr);
}
}
}
connect(DeviceListCombo, QOverload<int>::of(&QComboBox::currentIndexChanged), this, &DeckLinkOutputPage::OutputDeviceChanged);
connect(VideoFormatCombo, QOverload<int>::of(&QComboBox::currentIndexChanged), this, &DeckLinkOutputPage::VideoFormatChanged);
......@@ -287,11 +301,18 @@ void DeckLinkOutputPage::RequestedDeviceGranted(ComPtr<IDeckLink>& device)
// TODO: Connect
#if USE_TIMEPLUS
connect(BindingInputPage->GetCapture(), &CaptureThread::PushFrame, ProcessMask.get(), &ProcessMaskThread::AddFrame);
connect(BindingInputPage->GetReplay(), &ReplayThread::PushFrame, ProcessMask.get(), &ProcessMaskThread::AddFrame);
connect(ProcessMask.get(), &ProcessMaskThread::PushFrame, NDIOutput.get(), &NDIOutputThread::AddVideoFrameWithMask);
if (MqThread) connect(MqThread.get(), &ConsumerMqThread::PushMask, ProcessMask.get(), &ProcessMaskThread::addMaskBuffer);
connect(ProcessMask.get(), &ProcessMaskThread::PushFrame, SelectedDevice.Get(), &DeckLinkOutputDevice::AddVideoFrameMask);
connect(BindingInputPage->GetCapture(), &CaptureThread::PushFrame, ProcessMask.get(), &ProcessMaskThread::AddFrame, Qt::DirectConnection);
connect(BindingInputPage->GetReplay(), &ReplayThread::PushFrame, ProcessMask.get(), &ProcessMaskThread::AddFrame, Qt::DirectConnection);
connect(ProcessMask.get(), &ProcessMaskThread::PushFrame, NDIOutput.get(), &NDIOutputThread::AddVideoFrameWithMask, Qt::DirectConnection);
//if (MqThread) connect(MqThread.get(), &ConsumerMqThread::PushMask, ProcessMask.get(), &ProcessMaskThread::addMaskBuffer);
connect(this, &DeckLinkOutputPage::PushMask, ProcessMask.get(), &ProcessMaskThread::addMaskBuffer);
connect(ProcessMask.get(), &ProcessMaskThread::PushFrame, SelectedDevice.Get(), &DeckLinkOutputDevice::AddVideoFrameMask, Qt::DirectConnection);
if (Zoom)
{
connect(Zoom.get(), &ZoomThread::PushFrame, SelectedDevice.Get(), &DeckLinkOutputDevice::AddZoomFrame, Qt::DirectConnection);
connect(SelectedDevice.Get(), &DeckLinkOutputDevice::SendZoomResult, Zoom.get(), &ZoomThread::recvResult, Qt::DirectConnection);
}
#elif USE_H2V
connect(BindingInputPage->GetCapture(), &CaptureThread::PushFrame, Process.get(), &ProcessThread::AddFrame);
connect(Process.get(), &ProcessThread::PushFrame, SelectedDevice.Get(), &DeckLinkOutputDevice::AddFrame);
......@@ -339,6 +360,34 @@ void DeckLinkOutputPage::RequestedDeviceGranted(ComPtr<IDeckLink>& device)
END_SLOT_TIME_COUNTER
}
void DeckLinkOutputPage::OnRecvMqMsg(const QJsonDocument& document)
{
if (!document.isObject()) return;
QJsonObject object = document.object();
auto type = object["type"].toString();
if (!object["data"].isObject()) return;
auto obj = object["data"].toObject();
if (obj.isEmpty()) return;
if (type == "SENDMASK")
{
auto buffer = std::make_shared<MaskBuffer>(obj);
if (buffer && buffer->signal != -1)
{
emit PushMask(buffer);
}
}
else if (type == "SENDPOINTS")
{
if (!Zoom) return;
auto attr = std::make_shared<SportAttribute>(obj);
if (attr && attr->timecode)
{
emit PushAttribute(attr);
}
}
}
void DeckLinkOutputPage::OnRecvMsg(QByteArray data)
{
QJsonDocument document = QJsonDocument::fromJson(data);
......@@ -368,6 +417,20 @@ void DeckLinkOutputPage::OnRecvMsg(QByteArray data)
ProcessMask->StartRecord(start_time, end_time);
}
}
else if (type == "RECONFIRM")
{
auto start_time = object["start_time"].toInt();
auto status = (SendSDIStatus)object["status"].toInt();
SendSdiParams params{ status,start_time };
if (SelectedDevice) SelectedDevice->SetSendSdiParams(params);
if (Zoom && status == SSS_CLEAR)
{
ZoomResultInfo info;
info.drop = true;
info.timestamp = start_time;
Zoom->setResult(info);
}
}
}
}
......@@ -487,7 +550,7 @@ void DeckLinkOutputPage::ObjectNameChanged(const QString& newName)
std::string queue_name = "test" + index.toStdString();
std::string exchange_name = queue_name;
MqThread = std::make_shared<ConsumerMqThread>(queue_name, exchange_name, "127.0.0.1", "admin", "123456");
MqThread = std::make_shared<ConsumerMqThread>(queue_name, exchange_name, "127.0.0.1", "admin", "123456",this);
MqThread->start();
}
#elif USE_H2V
......
......@@ -287,9 +287,13 @@ long DeckLinkOutputVideoFrame::GetRowBytes()
HRESULT DeckLinkOutputVideoFrame::GetBytes(void** buf)
{
//*buf = m_img->GetBytes();
if (videoMask && videoMask->data_)
if (videoMask)
{
*buf = videoMask->data_;
if (videoMask->data_) *buf = videoMask->data_;
else if (videoMask->pImage && videoMask->pImage->uyvy_data) {
*buf = videoMask->pImage->uyvy_data;
}
else return S_FALSE;
}
else return S_FALSE;
......
......@@ -8,9 +8,10 @@
#include "NDI/NDIOutputThread.h"
#include "Utils/Common.h"
#include "libyuv.h"
#include "Utils/Settings.h"
extern qint64 StartTimeStamp;
extern int OneFrameDuration;
//extern qint64 StartTimeStamp;
//extern int OneFrameDuration;
#define SENDBGRA 1
#define MAXSIZE 30
......@@ -141,7 +142,7 @@ bool NDIOutputThread::Init()
#endif
//Frame.p_data = (uint8_t*)malloc(Frame.xres * Frame.yres * 2);
Frame.frame_rate_D = 1;
Frame.frame_rate_N = 50;
Frame.frame_rate_N = Settings::FrameRate;
Frame.frame_format_type = NDIlib_frame_format_type_progressive;
Frame.picture_aspect_ratio = 16.0 / 9;
//Frame.timecode = NDIlib_send_timecode_synthesize;
......@@ -150,9 +151,9 @@ bool NDIOutputThread::Init()
if (cropFlag)
{
AudioFrame.FourCC = NDIlib_FourCC_audio_type_FLTP;
AudioFrame.no_channels = 2;
AudioFrame.no_channels = Settings::AudioChannel;
AudioFrame.sample_rate = 48000;
AudioFrame.p_data = (uint8_t*)malloc(MAXCHANNEL * 1600 * sizeof(float));
AudioFrame.p_data = (uint8_t*)malloc(AudioFrame.no_channels * 9600 * sizeof(float));
AudioCvtPtr = std::make_shared<AudioConvert>(AudioFrame.no_channels, bmdAudioSampleType32bitInteger);
}
......@@ -232,21 +233,22 @@ void NDIOutputThread::run()
}
else if (timePlusFlag)
{
if (VideoMaskQueue.WaitFor(frame_mask))
if (VideoMaskQueue.WaitFor(Settings::SDIOutputWaitNums))
{
//if (!frame_mask ||(frame_mask->mask_flag && !frame_mask->data_)) continue;
VideoMaskQueue.Pop(frame_mask);
if (!frame_mask || (!frame_mask->data_ && !frame_mask->pImage && !frame_mask->pImage->data)) continue;
if (OneFrameDuration > 0 && frame_mask->flag_ == BS_END)
if (Settings::OneFrameDuration > 0 && frame_mask->flag_ == BS_END)
{
end_blend_frame_ = frame_mask;
}
if (OneFrameDuration > 0 && send_count < OneFrameDuration && end_blend_frame_)
if (Settings::OneFrameDuration > 0 && send_count < Settings::OneFrameDuration && end_blend_frame_)
{
send_one_frame_flag = true;
send_count++;
}
else if (send_count >= OneFrameDuration && end_blend_frame_ && (frame_mask->flag_ == BS_IDEL || frame_mask->flag_ == BS_START))
else if (send_count >= Settings::OneFrameDuration && end_blend_frame_ && (frame_mask->flag_ == BS_IDEL || frame_mask->flag_ == BS_START))
{
send_one_frame_flag = false;
send_count = 0;
......@@ -321,8 +323,15 @@ void NDIOutputThread::run()
// Frame.p_data = src_buffer;
//#endif
}
Frame.timestamp = frame->timestamp;
Frame.timestamp = (frame->replaySeq ? frame->replaySeq : frame->timestamp);
/*if (normal_last_timestamp && normal_last_timestamp >= frame->timestamp)
{
Frame.timecode = frame->timestamp;
}*/
Frame.timecode = frame->timestamp;;
NDIlib_send_send_video_v2(Instance, &Frame);
//normal_last_timestamp = frame->timestamp;
//qDebug() << GetCurrDateTimeStr()<<":send ndi Frame.timestamp->" << Frame.timestamp << "\n";
//av_free(dstBuff);
......@@ -407,6 +416,11 @@ void NDIOutputThread::AddFilterFrame(const std::shared_ptr<videoFrameData>& fram
void NDIOutputThread::AddFrame(std::shared_ptr<videoFrameData> frame)
{
/*if (last_recv_ts)
{
qDebug() << "NDIThread:AddFrame:" << frame->timestamp << ",last recv duration:" << TimeMilliSecond() - last_recv_ts << "\n";
}
last_recv_ts = TimeMilliSecond();*/
if (VideoQueue.Size() > MAXSIZE)
{
qDebug() << "ndi send VideoQueue size than maxsize ,queue size:" << VideoQueue.Size() << "\n";
......
#include "Network/UdpSend.h"
// Process-wide singleton accessor (Meyers singleton: function-local static
// construction is thread-safe since C++11).
UdpSend& UdpSend::GetInstance()
{
	static UdpSend udpsend;
	return udpsend;
}
// Queue a datagram for the worker thread to send; silently dropped when
// no socket was created.
void UdpSend::SendUdpMsg(const QString& msg, const QString serverAddr, const QString serverPort)
{
	if (!sendUdpSocket)
		return;
	UdpPacket packet{ msg, serverAddr, serverPort };
	msg_queue.Push(packet);
}
// Worker loop: block on the message queue and write each packet out.
void UdpSend::run()
{
	UdpPacket packet;
	while (true)
	{
		if (!msg_queue.WaitFor(packet))
		{
			// BUG FIX: WaitFor() returns false once its waiters are cancelled;
			// the original kept looping and busy-spun at 100% CPU forever.
			// Exit the thread instead.
			break;
		}
		QByteArray byteArray = packet.msg.toUtf8();
		if (sendUdpSocket && sendUdpSocket->writeDatagram(byteArray, QHostAddress(packet.serverAddr), packet.serverPort.toInt()) == -1)
		{
			qDebug() << "send udp msg fail,msg:" << packet.msg << "\n";
		}
	}
}
// Private constructor (singleton): create the send socket, parented to this
// object so Qt owns its lifetime.
UdpSend::UdpSend()
{
	if (!sendUdpSocket) sendUdpSocket = new QUdpSocket(this);
}
// Close the socket and schedule its deletion on the owning event loop
// (deleteLater is the safe way to destroy a QObject from another thread).
UdpSend::~UdpSend()
{
	if (sendUdpSocket)
	{
		sendUdpSocket->close();
		sendUdpSocket->deleteLater();
	}
}
\ No newline at end of file
......@@ -12,17 +12,58 @@ UdpServer::~UdpServer()
udpSocket->close();
udpSocket->deleteLater();
}
if(sendUdpSocket)
{
sendUdpSocket->close();
sendUdpSocket->deleteLater();
}
}
void UdpServer::SetUpUDP(const QString hostAddr, const QString hostPort)
{
if(!udpSocket) udpSocket = new QUdpSocket(this);
//if (!sendUdpSocket) sendUdpSocket = new QUdpSocket(this);
if (udpSocket)
{
udpSocket->bind(QHostAddress(hostAddr), hostPort.toInt());
connect(udpSocket, SIGNAL(readyRead()), this, SLOT(ReadDatagrams()), Qt::DirectConnection);
}
if (sendUdpSocket)
{
connect(sendUdpSocket, SIGNAL(errorOccurred(QAbstractSocket::SocketError)), this, SLOT(HandleError(QAbstractSocket::SocketError)));
}
}
void UdpServer::SendUdpMsg(const QString& msg, const QString serverAddr, const QString serverPort)
{
if (sendUdpSocket)
{
QByteArray byteArray = msg.toUtf8();
if (sendUdpSocket->writeDatagram(byteArray, QHostAddress(serverAddr), serverPort.toInt()) == -1)
{
qDebug() << "send udp msg fail,msg:" << msg << "\n";
}
}
}
// Slot for sendUdpSocket's errorOccurred signal. All branches are currently
// empty placeholders for future per-error recovery logic.
void UdpServer::HandleError(QAbstractSocket::SocketError error)
{
	// Handle the problem according to the error type
	switch (error) {
	case QAbstractSocket::ConnectionRefusedError:
		// Handle the error
		break;
	case QAbstractSocket::RemoteHostClosedError:
		// Handle the error
		break;
	// More error handling...
	default:
		// Handle other errors
		break;
	}
}
void UdpServer::ReadDatagrams()
......
#include "Record/Record.h"
#include "libyuv.h"
#include "Utils/Settings.h"
extern int FrameRate;
//extern int FrameRate;
#define CLOCKHZ 90000
#define BITRATENUM 500000000
#define TESTFILE 0
......@@ -61,7 +62,7 @@ int Record::Init(const std::string& outfile, const int& w, const int& h, const A
pAvCodecContext->codec_id = pavcodec->id;
pAvCodecContext->pix_fmt = AV_PIX_FMT_YUV422P10LE;
int fps = FrameRate;
int fps = Settings::FrameRate;
int64_t bit_rate = BITRATENUM; //ƽ500M/b
pAvCodecContext->codec_type = AVMEDIA_TYPE_VIDEO;
pAvCodecContext->width = w;
......@@ -273,7 +274,7 @@ int Record::WriteBuffer(unsigned char* pBuffer, long& size)
else
{
pYUVFrame->pts = iPTS;
iPTS += CLOCKHZ / FrameRate;
iPTS += CLOCKHZ / Settings::FrameRate;
}
......
#include "Record/RecordStore.h"
#include "Record/RecordThread.h"
#include "Utils/Settings.h"
extern int FrameRate;
extern int RecordStoreDuration;
//extern int FrameRate;
//extern int RecordStoreDuration;
RecordStore::RecordStore()
{
max_size = RecordStoreDuration * FrameRate / 1000;
max_size = Settings::RecordStoreDuration * Settings::FrameRate / 1000;
}
RecordStore::~RecordStore()
......@@ -17,7 +18,7 @@ RecordStore::~RecordStore()
void RecordStore::RecvFrame(const std::shared_ptr<videoFrameData>& frame)
{
if(frame && frame->data)
//if(frame && frame->data)
{
std::lock_guard<std::mutex> lock(mutex);
video_map.insert({ frame->timestamp,frame });
......
......@@ -27,15 +27,17 @@ int RecordThread::Init(const int& w, const int& h, const AVPixelFormat& fmt, con
void RecordThread::run()
{
//qDebug() << save_path.c_str() <<" start record time:"<< GetCurrDateTimeStr() << "\n";
auto t1 = TimeMilliSecond();
std::shared_ptr<videoFrameData> frame;
while (video_queue.Size())
{
video_queue.Pop(frame);
uint8_t* data = frame->data;
long size = frame->size;
auto data = frame->uyvy_data;
long size = frame->uyvy_size;
ptr_record->WriteBuffer(data, size);
}
ptr_record->Close();
qDebug() << save_path.c_str() << " record duration time:" << TimeMilliSecond() - t1 << "\n";
//delete this;
}
\ No newline at end of file
......@@ -4,12 +4,12 @@
#include "Utils/yuv4k.h"
#include "Utils/Memory4k.h"
#include <omp.h>
#include "Utils/Settings.h"
#pragma intrinsic(memcpy)
extern int RecordFlag;
extern int OpenOMP;
//extern int RecordFlag;
//extern int OpenOMP;
static int64_t GetCurrTimeMS()
{
......@@ -37,7 +37,7 @@ CaptureThread::CaptureThread()
m_lastRecvTS(TimeMilliSecond())
//taskQueue(std::string("task")+ std::to_string(idx))
{
if (!bgra_4k_data) bgra_4k_data = new uint8_t[3840 * 2160 << 2];
//if (!bgra_4k_data) bgra_4k_data = new uint8_t[3840 * 2160 << 2];
//idx = s_count++;
//m_scale = new VideoScale(3840, 2160, AV_PIX_FMT_UYVY422, 3840, 2160, AV_PIX_FMT_BGRA);
//HRESULT result = CoCreateInstance(CLSID_CDeckLinkVideoConversion, nullptr, CLSCTX_ALL, IID_IDeckLinkVideoConversion, (void**)deckVideoConversion.GetAddressOf());
......@@ -78,12 +78,12 @@ void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame, const
/*std::shared_ptr<videoFrameData> videoFrame1 = std::make_shared<videoFrameData>(videoFrame, timestamp, timestamp);
if (NDIOutput) NDIOutput->AddFrame(videoFrame1);*/
if (video_data && video_data->data) taskVideoQueue.Push(video_data);
if (video_data && video_data->uyvy_data) taskVideoQueue.Push(video_data);
if (RecordFlag && RecordStorePtr && !replay_flag) {
/*if (RecordFlag && RecordStorePtr && (replay_status == RS_IDEL || replay_status == RS_END)) {
std::shared_ptr<videoFrameData> video_frame = std::make_shared<videoFrameData>(videoFrame, sequenceNum, sequenceNum,meta);
RecordStorePtr->RecvFrame(video_frame);
}
}*/
sequenceNum++;
/*if(video_data && video_data->data)
{
......@@ -96,6 +96,10 @@ void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame, const
END_SLOT_TIME_COUNTER
}
void CaptureThread::RecvReplayStatus(const ReplayStatus& status)
{
replay_status = status;
}
void CaptureThread::recvReplay(bool& start)
{
......@@ -107,18 +111,21 @@ void CaptureThread::run()
{
while(true)
{
START_WAIT_TIME_COUNTER
std::shared_ptr<videoFrameData> videoFrame;
std::shared_ptr<videoFrameData> videoFrame = nullptr;
qint64 cur_time = 0;
qint64 sequence = 0;
auto t1 = TimeMilliSecond();
if (taskVideoQueue.WaitFor(videoFrame))
{
END_WAIT_TIME_COUNTER
if (videoFrame && videoFrame->data)
//qDebug() << "capture thread get frame duration:" << TimeMilliSecond() - t1 << "\n";
if (videoFrame && videoFrame->uyvy_data)
{
//auto t1 = TimeMilliSecond();
if (Settings::RecordFlag && RecordStorePtr && (replay_status == RS_IDEL || replay_status == RS_END)) {
//std::shared_ptr<videoFrameData> video_frame = std::make_shared<videoFrameData>(videoFrame, sequenceNum, sequenceNum, meta);
RecordStorePtr->RecvFrame(videoFrame);
}
//qDebug() << videoFrame->sequenceNum << "capture time:" << GetCurrDateTimeStr() << "\n";
t1 = TimeMilliSecond();
if(videoFrame->fmt == bmdFormat8BitYUV || videoFrame->fmt == bmdFormat10BitYUV)
{
......@@ -131,7 +138,7 @@ void CaptureThread::run()
//qDebug() << "UYVYToARGB duration:" << TimeMilliSecond() - t1 << "\n";
//auto t2 = TimeMilliSecond();
videoFrame->size = size;
videoFrame->fmt = bmdFormat8BitBGRA;
//videoFrame->fmt = bmdFormat8BitBGRA;
if(size > videoFrame->capacity)
{
delete videoFrame->data;
......@@ -140,92 +147,56 @@ void CaptureThread::run()
}
size_t once_size = width * height;
uint32_t width_ = 1920;
uint32_t height_ = 1080;
size_t yuv_size = width_ * height_ << 1;
if (!OpenOMP)
uint32_t width_ = width >> 1;
uint32_t height_ = height >> 1;
if (!Settings::OpenOMP)
{
/*libyuv::UYVYToARGB(videoFrame->data, width << 1, bgra_4k_data, width << 2, width, height);
memcpy(videoFrame->data, bgra_4k_data, size);*/
/*uint8_t* src1 = videoFrame->data;
uint8_t* src2 = src1 + yuv_size;
uint8_t* src3 = src2 + yuv_size;
uint8_t* src4 = src3 + yuv_size;
uint8_t* dst1 = bgra_4k_data;
uint8_t* dst2 = dst1 + once_size;
uint8_t* dst3 = dst2 + once_size;
uint8_t* dst4 = dst3 + once_size;
uint8_t* dst11 = videoFrame->data;
uint8_t* dst12 = dst11 + once_size;
uint8_t* dst13 = dst12 + once_size;
uint8_t* dst14 = dst13 + once_size;
{
libyuv::UYVYToARGB(src1, width_ << 1, dst1, width_ << 2, width_, height_);
libyuv::UYVYToARGB(src2, width_ << 1, dst2, width_ << 2, width_, height_);
libyuv::UYVYToARGB(src3, width_ << 1, dst3, width_ << 2, width_, height_);
libyuv::UYVYToARGB(src4, width_ << 1, dst4, width_ << 2, width_, height_);
memcpy(dst11, dst1, once_size);
memcpy(dst12, dst2, once_size);
memcpy(dst13, dst3, once_size);
memcpy(dst14, dst4, once_size);
}*/
Yuv4k::UYVYToARGB4K(videoFrame->data, yuv_size, bgra_4k_data, once_size, width_, height_);
//auto t3 = TimeMilliSecond();
Memory::MemoryCopy4k(bgra_4k_data, videoFrame->data, once_size);
//qDebug() << "4k memcpy duration:" << TimeMilliSecond() - t3 << "\n";
size_t yuv_size = width_ * height_ << 1;
Yuv4k::UYVYToARGB4K(videoFrame->uyvy_data, yuv_size, videoFrame->data, once_size, width_, height_);
}
else
{
size_t once_size = width * height;
size_t once_size = (width * height << 1);
size_t num = size / once_size;
uint32_t width_ = 1920;
uint32_t height_ = 1080;
uint32_t width_ = width;
uint32_t height_ = (height >> 1);
size_t yuv_size = width_ * height_ << 1;
omp_set_num_threads(num);
#pragma omp parallel
{
#pragma omp for
for (int i = 0; i < num; i++)
{
auto dst = videoFrame->data + i * yuv_size;
auto src = bgra_4k_data + i * once_size;
libyuv::UYVYToARGB(dst, width_ << 1, src, width_ << 2, width_, height_);
//memcpy(dst, src, once_size);
}
#pragma omp for
for (int i = 0; i < num; i++)
#pragma omp parallel
{
auto dst = videoFrame->data + i * once_size;
auto src = bgra_4k_data + i * once_size;
//libyuv::UYVYToARGB(dst, width_ << 1, src, width_ << 2, width_, height_);
memcpy(dst, src, once_size);
#pragma omp for nowait
for (int i = 0; i < num; i++)
{
auto dst = videoFrame->data + i * once_size;
auto src = videoFrame->uyvy_data + i * yuv_size;
libyuv::UYVYToARGB(src, width_ << 1, dst, width_ << 2, width_, height_);
//qDebug() << "get omp thread id:" << omp_get_thread_num() << "\n";
//memcpy(dst, src, once_size);
}
//#pragma omp for
//for (int i = 0; i < num; i++)
//{
// auto dst = videoFrame->data + i * once_size;
// auto src = bgra_4k_data + i * once_size;
// //libyuv::UYVYToARGB(dst, width_ << 1, src, width_ << 2, width_, height_);
// memcpy(dst, src, once_size);
//}
}
}
}
//libyuv::ARGBCopy(bgra_4k_data, width << 2, videoFrame->data, width << 2, width, height);
//qDebug() << "4k memcpy duration:" << TimeMilliSecond() - t2 << "\n";
}
if (!replay_flag) {
//qDebug() << GetCurrDateTimeStr()<<" 4k yuvtobgra:" << TimeMilliSecond() - t1 << "\n";
t1 = TimeMilliSecond();
if (replay_status == RS_IDEL || replay_status == RS_END || replay_status == RS_PRE_START) {
if (NDIOutput) NDIOutput->AddFrame(videoFrame);
emit PushFrame(videoFrame);
}
......@@ -234,5 +205,6 @@ void CaptureThread::run()
}
//DEBUG_FUNCTION("taskQeueue Size: ", taskQueue.size())
}
//qDebug() << "capture thread send video duration:" << TimeMilliSecond() - t1 << "\n";
}
}
\ No newline at end of file
......@@ -2,8 +2,8 @@
#include "Utils/MaskBuffer.h"
ConsumerMqThread::ConsumerMqThread(const std::string& queue_name, const std::string& exchange_name,
const std::string& ip, const std::string& user_id, const std::string& pwd):mq_queue_name(queue_name),
mq_exchange_name(exchange_name),mq_ip(ip),mq_user_id(user_id),mq_pwd(pwd), mq_port(5672), channel_id(1)
const std::string& ip, const std::string& user_id, const std::string& pwd, Listener* listerner):mq_queue_name(queue_name),
mq_exchange_name(exchange_name),mq_ip(ip),mq_user_id(user_id),mq_pwd(pwd), mq_port(5672), channel_id(1),mq_listener(listerner)
{
}
......@@ -57,28 +57,31 @@ void ConsumerMqThread::read_msg()
{
amqp_destroy_message(&message);
continue;
}
}
//char* body = new char[message.body.len];
//memcpy(body, message.body.bytes, message.body.len);
//amqp_destroy_message(&message);
//QByteArray decode = QByteArray::fromBase64(QString(body).toLatin1());
//delete body;
std::shared_ptr<MaskBuffer> buffer = nullptr;
//std::shared_ptr<MaskBuffer> buffer = nullptr;
QByteArray array(QByteArray::fromRawData((char*)message.body.bytes, message.body.len));
QJsonDocument document = QJsonDocument::fromJson(array);
if (document.isObject())
if (mq_listener) mq_listener->OnRecvMqMsg(document);
/*if (document.isObject())
{
buffer = std::make_shared<MaskBuffer>(document);
}
amqp_destroy_message(&message);
amqp_release_buffers(mq_connection);
if (buffer && buffer->signal != -1)
{
emit PushMask(buffer);
}
}*/
amqp_destroy_message(&message);
amqp_release_buffers(mq_connection);
}
}
......
......@@ -2,21 +2,153 @@
#include "opencv2/opencv.hpp"
#include "libyuv.h"
#include "Utils//Common.h"
#include "Utils/Settings.h"
#define CUTBUFFERMAXSIZE 125*2
int ProcessThread::s_count = 0;
//extern int OutputDeleyTime;
extern int FrameRate;
extern int FrontDeleyTime;
//extern int BlackBottomHeight;
//extern int ScaleMode;
//extern std::map<qint32, qint32> map_output_delay;
extern bool HaveBlackDataFlag;
//extern int FrameRate;
//extern int FrontDeleyTime;
//extern bool HaveBlackDataFlag;
extern std::map<qint32, qint32> map_scale_mode;
// Default ROI: a CROPHEIGHT-tall box with the configured aspect ratio,
// centered horizontally in a 1920x1080 frame.
RoiMessage::RoiMessage() : h(CROPHEIGHT)
{
	w = h * Settings::AspecDen / Settings::AspecNum;
	w += w % 2; // keep the width even (chroma-subsampled formats need it)
	x = 1920 / 2 - w / 2;
	y = 1080 / 2 - h / 2;
	centerX = 1920 / 2;
	centerY = 0;
	timecode = 0;
	// BUG FIX: these members (assigned by the JSON constructors) were left
	// uninitialized here, yielding indeterminate values on default construction.
	id = 0;
	width = 0;
	height = 0;
}
// Parse a JSON ROI message ({"signal","roi":[minx,miny,maxx,maxy],"id",
// "center_x","center_y","width","height","timecode"}). Falls back to the
// default centered box when the payload is not a JSON object (the original
// left every member uninitialized in that case).
RoiMessage::RoiMessage(QByteArray& data)
{
	QJsonDocument document = QJsonDocument::fromJson(data);
	QJsonObject object;
	if (document.isObject())
	{
		object = document.object();
		mode = object.value("signal").toString();
		QJsonArray roi = object.value("roi").toArray();
		int minx = roi[0].toInt();
		int miny = roi[1].toInt();
		id = object.value("id").toInt();
		centerX = object.value("center_x").toInt();
		centerY = object.value("center_y").toInt();
		width = object.value("width").toInt();
		height = object.value("height").toInt();
		// timecode arrives as a JSON double; format with zero decimals before
		// converting so large epoch values are not truncated.
		timecode = QString::number(object.value("timecode").toDouble(), 'f', 0).toLongLong();
		h = CROPHEIGHT;
		w = h * Settings::AspecDen / Settings::AspecNum;
		x = minx;
		y = miny;
	}
	else
	{
		// Mirror the default-constructed state on a malformed payload.
		h = CROPHEIGHT;
		w = h * Settings::AspecDen / Settings::AspecNum;
		w += w % 2;
		x = 1920 / 2 - w / 2;
		y = 1080 / 2 - h / 2;
		centerX = 1920 / 2;
		centerY = 0;
		timecode = 0;
		id = 0;
		width = 0;
		height = 0;
	}
}
// Rvalue overload. The original duplicated the lvalue constructor verbatim;
// delegate instead (the named parameter binds as an lvalue) so the two
// overloads cannot drift apart. Parsing only reads the buffer, so no move
// is needed.
RoiMessage::RoiMessage(QByteArray&& data)
	: RoiMessage(data)
{
}
// Copy constructor.
// Fix: the original forgot mode, id, width and height, leaving them
// default-constructed/uninitialized in the copy.
RoiMessage::RoiMessage(const RoiMessage& other) : x(other.x), y(other.y), w(other.w), h(other.h),
	timecode(other.timecode), centerX(other.centerX), centerY(other.centerY)
{
	mode = other.mode;
	id = other.id;
	width = other.width;
	height = other.height;
}
// Move constructor (QString is implicitly shared, so a plain copy of `mode`
// is cheap and leaves `other` valid).
// Fix: the original forgot mode, id, width and height.
RoiMessage::RoiMessage(RoiMessage&& other)
{
	x = other.x;
	y = other.y;
	w = other.w;
	h = other.h;
	timecode = other.timecode;
	centerX = other.centerX;
	centerY = other.centerY;
	mode = other.mode;
	id = other.id;
	width = other.width;
	height = other.height;
}
// Copy assignment.
// Fix: the original forgot mode, id, width and height.
// NOTE(review): returns by value to match the declared signature in the
// header; a `RoiMessage&` return would be more idiomatic but would change
// the interface.
RoiMessage RoiMessage::operator=(const RoiMessage& other)
{
	x = other.x;
	y = other.y;
	w = other.w;
	h = other.h;
	timecode = other.timecode;
	centerX = other.centerX;
	centerY = other.centerY;
	mode = other.mode;
	id = other.id;
	width = other.width;
	height = other.height;
	return *this;
}
// Move assignment (members are trivially copyable apart from the implicitly
// shared QString, so copies are cheap and `other` stays valid).
// Fix: the original forgot mode, id, width and height.
// Returns by value to match the declared signature in the header.
RoiMessage RoiMessage::operator=(RoiMessage&& other)
{
	x = other.x;
	y = other.y;
	w = other.w;
	h = other.h;
	timecode = other.timecode;
	centerX = other.centerX;
	centerY = other.centerY;
	mode = other.mode;
	id = other.id;
	width = other.width;
	height = other.height;
	return *this;
}
// Construct a ROI from an explicit rectangle (x, y, width, height).
// NOTE(review): centerX/centerY, width/height, id and mode are not set on
// this path — confirm callers never read them from a message built here.
RoiMessage::RoiMessage(int X, int Y, int W, int H)
	:x(X),
	y(Y),
	w(W),
	h(H),
	timecode(0) {}
// A ROI is usable only when its origin lies strictly inside the positive
// quadrant and it has non-degenerate extent.
bool RoiMessage::IsValid()
{
	if (x <= 0 || y <= 0)
		return false;
	return (w > 0) && (h > 0);
}
// Trivial accessors. Getters intentionally return by value.
void RoiMessage::SetX(int x_)
{
	this->x = x_;
}
int RoiMessage::X() { return x; }
int RoiMessage::Y() { return y; }
int RoiMessage::CenterX() { return centerX; }
int RoiMessage::CenterY() { return centerY; }
int RoiMessage::Width() { return width; }
int RoiMessage::Height() { return h; }
qint64 RoiMessage::Timecode() { return timecode; }
ProcessThread::ProcessThread()
: sendFrames(0),
......@@ -30,7 +162,7 @@ ProcessThread::ProcessThread()
idx = s_count++;
int key = idx + 1;
//output_deley_time = map_output_delay[key];
minTaskImageQueueSize = FrontDeleyTime / 1000 * FrameRate;
minTaskImageQueueSize = Settings::FrontDeleyTime / 1000 * Settings::FrameRate;
}
ProcessThread::~ProcessThread()
......@@ -80,6 +212,8 @@ void ProcessThread::AddFrame(std::shared_ptr<videoFrameData> frame_data)
// END_SLOT_TIME_COUNTER
//}
void ProcessThread::ReadDatagramsNew()
{
START_SLOT_TIME_COUNTER
......@@ -175,7 +309,7 @@ void ProcessThread::ReadDatagrams()
void ProcessThread::cutRunFront()
{
bool continue_flag = false;
int min_size = FrontDeleyTime * FrameRate / 1000;
int min_size = Settings::FrontDeleyTime * Settings::FrameRate / 1000;
qint64 seqnum = 0;
while (true)
{
......@@ -310,9 +444,9 @@ void ProcessThread::cutRunFront()
{
qDebug() << "idx:" << idx << "taskImageQueue size:"<< taskImageQueue.Size() <<",min_size:"<< min_size << endl;
}*/
while (taskImageQueue.Size() > (FrameRate * (output_deley_time / 1000 + FrontDeleyTime / 1000)))
while (taskImageQueue.Size() > (Settings::FrameRate * (output_deley_time / 1000 + Settings::FrontDeleyTime / 1000)))
{
qDebug() << "idx:" << idx << ",----------------lost Image size:" << taskImageQueue.Size() - FrameRate * (output_deley_time / 1000 + FrontDeleyTime / 1000) << endl;
qDebug() << "idx:" << idx << ",----------------lost Image size:" << taskImageQueue.Size() - Settings::FrameRate * (output_deley_time / 1000 + Settings::FrontDeleyTime / 1000) << endl;
taskImageQueue.Pop(image);
}
if (cutRuleMap.size() > CUTBUFFERMAXSIZE)
......@@ -580,9 +714,9 @@ void ProcessThread::WorkCutImage(std::shared_ptr<videoFrameData>& pImage, RoiMes
int dstw = 1920, dsth = 1080, scalew = 0, scaleh = 1080;
uint8_t* buff3 = NULL;
if (HaveBlackDataFlag)
if (Settings::HaveBlackDataFlag)
{
scalew = AspecNum * scaleh / AspecDen;
scalew = Settings::AspecNum * scaleh / Settings::AspecDen;
uint8_t* buff2 = new uint8_t[scalew * scaleh << 1];
uint8_t* scale_buffer_y = buff2;
uint8_t* scale_buffer_u = scale_buffer_y + scalew * scaleh;
......@@ -776,9 +910,9 @@ void ProcessThread::WorkCutImage(std::shared_ptr<Image>& pImage, RoiMessage& roi
int dstw = 1920, dsth = 1080, scalew = 0, scaleh = 1080;
uint8_t* buff3 = NULL;
if (HaveBlackDataFlag)
if (Settings::HaveBlackDataFlag)
{
scalew = AspecNum * scaleh / AspecDen;
scalew = Settings::AspecNum * scaleh / Settings::AspecDen;
uint8_t* buff2 = new uint8_t[scalew * scaleh << 2];
libyuv::ARGBScale(buff1, (roi.Height() << 2 << 1), roi.Height() << 1, roi.Width() << 1,
buff2, scalew << 2, scalew, scaleh, libyuv::FilterMode::kFilterNone);
......
#include "Threads/ReplayThread.h"
#include "Utils/Settings.h"
#include "Network/UdpSend.h"
extern int ReplayStoreTime;
extern int FrameRate;
//extern int ReplayStoreTime;
//extern int FrameRate;
ReplayThread::ReplayThread()
{
max_store_size = ReplayStoreTime / 1000 * FrameRate;
interval = 1000 / FrameRate;
max_store_size = Settings::ReplayStoreTime / 1000 * Settings::FrameRate;
interval = 1000 / Settings::FrameRate;
}
ReplayThread::~ReplayThread()
......@@ -18,106 +20,179 @@ void ReplayThread::addFrame(std::shared_ptr<videoFrameData> frameData)
{
if(frameData && frameData->data)
{
//qDebug()<< frameData->sequenceNum << "replay time:" << GetCurrDateTimeStr() << "\n";
{
std::unique_lock<std::mutex> ulock(mutex);
if(!replay_flag) storeVideoMap.insert({ frameData->timestamp,frameData });
if(storeVideoMap.size() > max_store_size)
{
current_seq = frameData->sequenceNum;
if (replay_params.status == RS_IDEL || replay_params.status == RS_END) storeVideoMap.insert({ frameData->timestamp,frameData });
if (storeVideoMap.size() > max_store_size)
{
storeVideoMap.erase(storeVideoMap.begin()->first);
}
if (replay_flag) {
if ((replay_params.status == RS_START || replay_params.status == RS_RE_START)) {
cv.notify_all();
}
}
}
}
void ReplayThread::recvReplayParams(ReplayParams& params, bool& flag)
void ReplayThread::recvReplayParams(const ReplayParams& params)
{
std::unique_lock<std::mutex> ulock(mutex);
replay_params = params;
if(replay_flag != flag) replay_flag = flag;
if(flag && !replayVideoVec.empty())
//if(replay_flag != flag) replay_flag = flag;
if((replay_params.status == RS_START || replay_params.status == RS_RE_START) && !replayVideoVec.empty())
{
replay_position = 0;
}
}
bool ReplayThread::CanReplay(const ReplayParams& params)
{
auto& start_time = params.start_time;
auto& end_time = params.end_time;
{
std::unique_lock<std::mutex> ulock(mutex);
if (end_time <= start_time ||
storeVideoMap.find(start_time) == storeVideoMap.end() ||
storeVideoMap.find(end_time) == storeVideoMap.end())
{
replay_params.status = RS_END;
return false;
}
else return true;
}
}
void ReplayThread::run()
{
std::shared_ptr<videoFrameData> resend_frame = nullptr;
qint32 resend_num = 0;
qint64 resend_tm = 0;
bool resend_start = false;
while (true)
{
{
std::unique_lock<std::mutex> ulock(mutex);
cv.wait(ulock);
if(last_replay_params.start_time != replay_params.start_time ||
last_replay_params.end_time != replay_params.end_time)
}
if (last_replay_params.start_time != replay_params.start_time ||
last_replay_params.end_time != replay_params.end_time)
{
/*std::queue<std::shared_ptr<videoFrameData>> empty;
if (!replayVideoQueue1.empty()) std::swap(empty, replayVideoQueue1);
if (!replayVideoQueue2.empty()) std::swap(empty, replayVideoQueue2);*/
if (!replayVideoVec.empty()) std::vector<std::shared_ptr<videoFrameData>>().swap(replayVideoVec);
replay_position = 0;
last_replay_params = replay_params;
}
if (replayVideoVec.empty())
{
//qint32 frame_nums = replay_params.end_time - replay_params.end_time + 1;
//auto t1 = TimeMilliSecond();
auto tm_begin = replay_params.start_time - Settings::ReplayForward;
auto tm_end = replay_params.end_time + Settings::ReplayDeley;
auto begin_tm = storeVideoMap.begin()->first;
auto end_tm = storeVideoMap.rbegin()->first;
if (tm_begin < begin_tm) tm_begin = begin_tm;
if (tm_end > end_tm) tm_end = end_tm;
size_t size = tm_end - tm_begin + 1;
/*auto& tm_end = replay_params.end_time;
auto& tm_begin = replay_params.start_time;*/
auto itor_end = storeVideoMap.find(tm_end);
auto itor_begin = storeVideoMap.find(tm_begin);
if (itor_end == storeVideoMap.end() || itor_begin == storeVideoMap.end() || tm_end <= tm_begin)
{
/*std::queue<std::shared_ptr<videoFrameData>> empty;
if (!replayVideoQueue1.empty()) std::swap(empty, replayVideoQueue1);
if (!replayVideoQueue2.empty()) std::swap(empty, replayVideoQueue2);*/
if(!replayVideoVec.empty()) std::vector<std::shared_ptr<videoFrameData>>().swap(replayVideoVec);
replay_position = 0;
last_replay_params = replay_params;
qint32 errNo = (itor_end == storeVideoMap.end() ? 2 : (itor_begin == storeVideoMap.end() ? 1 : 3));
QString msg = "{\"type\":\"ReplayResp\",\"data\":{\"inTime\":" + QString::number(replay_params.start_time) +",\"outTime\":" + QString::number(replay_params.end_time) +
",\"status\":0,\"errNo\":" + QString::number(errNo) +"}}";
UdpSend::GetInstance().SendUdpMsg(msg, Settings::UIIpAddr, QString::number(Settings::UIUdpPort));
qDebug() << "replay fail,errno:"<<errNo << "\n";
continue;
}
if(replayVideoVec.empty())
bool first = true;
resend_frame = nullptr;
resend_num = (Settings::SDIOneFrameDuration > Settings::SDIOneFrameDuration ? Settings::SDIOneFrameDuration : Settings::SDIOneFrameDuration);
for (auto itor = itor_begin;; itor++)
{
//qint32 frame_nums = replay_params.end_time - replay_params.end_time + 1;
auto& tm_end = replay_params.end_time;
auto& tm_begin = replay_params.start_time;
auto itor_end = storeVideoMap.find(tm_end);
auto itor_begin = storeVideoMap.find(tm_begin);
if(itor_end == storeVideoMap.end() || itor_begin == storeVideoMap.end() || tm_end <= tm_begin)
{
qDebug() << "replay fail ..................." << "\n";
continue;
}
for (auto itor = itor_begin;; itor++)
replayVideoVec.emplace_back(itor->second);
if (itor->first > replay_params.end_time && first)
{
replayVideoVec.emplace_back(itor->second);
if (itor == itor_end)
{
break;
}
resend_frame = itor->second;
first = false;
}
//replay_position = replayVideoVec.size() - 1;
/*auto itor_top = storeVideoMap.lower_bound(tm_top);
if(itor_tail == storeVideoMap.end() || itor_top == storeVideoMap.end())
{
qDebug() << "replay fail ..................." << "\n";
continue;
if (itor == itor_end)
{
break;
}
tm_top = itor_top->first;
tm_tail = itor_tail->first;
}
if (replayVideoVec.size() != size)
{
qDebug() << "replay find size err ..................." << "\n";
}
for(auto itor = itor_top;itor != itor_tail;itor++)
{
replayVideoVec.emplace_back(itor->second);
}
replayVideoVec.emplace_back(itor_top->second);*/
//qDebug() << "replay find duration:" << TimeMilliSecond() - t1 << "\n";
//replay_position = replayVideoVec.size() - 1;
/*auto itor_top = storeVideoMap.lower_bound(tm_top);
if(itor_tail == storeVideoMap.end() || itor_top == storeVideoMap.end())
{
qDebug() << "replay fail ..................." << "\n";
continue;
}
tm_top = itor_top->first;
tm_tail = itor_tail->first;
for(auto itor = itor_top;itor != itor_tail;itor++)
{
replayVideoVec.emplace_back(itor->second);
}
replayVideoVec.emplace_back(itor_top->second);*/
}
if (!replayVideoVec.empty())
{
std::shared_ptr<videoFrameData> frame = nullptr;
frame = (resend_frame ? (resend_start ? resend_frame : replayVideoVec[replay_position] ) : replayVideoVec[replay_position]);
frame->replaySeq = current_seq;
emit PushFrame(frame);
if ((!resend_start || !resend_frame) && replay_position < (replayVideoVec.size() - 1)) replay_position++;
if (!replayVideoVec.empty())
if (resend_frame)
{
auto frame = replayVideoVec[replay_position];
emit PushFrame(frame);
if(replay_position < (replayVideoVec.size() - 1)) replay_position++;
/*if (replay_position == 0)
if (resend_start && resend_num) resend_num--;
if (!resend_start && resend_num && frame->timestamp == resend_frame->timestamp)
{
resend_start = true;
}
if (resend_num <= 0 && resend_start)
{
replay_position = replayVideoVec.size();
}*/
//if(replay_position > replayVideoVec.size() - 1) replay_position++;
//else {
// //TODO 此处主要针对当次replay到最后一帧之后就结束发送数据 需要实际测试此处的情况
// replay_position = 0;//这个是为了下次还是重复当次的数据用
// //if (replay_flag) replay_flag = false; //这个是避免在不知道播控情况下 重头开始发送数据
//}
resend_start = false;
resend_num = (Settings::SDIOneFrameDuration > Settings::SDIOneFrameDuration ? Settings::SDIOneFrameDuration : Settings::SDIOneFrameDuration);
}
}
//qDebug() << "replay time:" << GetCurrDateTimeStr() << ",send frame timecode:" << frame->timestamp << ",replayseq:" << frame->replaySeq << "\n";
/*if (replay_position == 0)
{
replay_position = replayVideoVec.size();
}*/
//if(replay_position > replayVideoVec.size() - 1) replay_position++;
//else {
// //TODO 此处主要针对当次replay到最后一帧之后就结束发送数据 需要实际测试此处的情况
// replay_position = 0;//这个是为了下次还是重复当次的数据用
// //if (replay_flag) replay_flag = false; //这个是避免在不知道播控情况下 重头开始发送数据
//}
}
}
}
\ No newline at end of file
This source diff could not be displayed because it is too large. You can view the blob instead.
#include "Utils/Settings.h"
// Static storage/defaults for the global Settings. These appear to be
// overwritten from the config file at startup (NOTE(review): the loader is
// not visible in this file — confirm load order before relying on defaults).
int32_t Settings::ReplayStoreTime = 10000;     // replay buffer span; presumably ms (used as /1000*FrameRate)
int32_t Settings::FrontDeleyTime = 3000;       // front delay ("Deley" = delay); presumably ms
int32_t Settings::FrameRate = 50;              // frames per second
int32_t Settings::OutputPlayMode = 0;
int32_t Settings::AudioChannel = 2;
int32_t Settings::ScaleMode = 0;
int32_t Settings::AspecNum = 0;                // aspect-ratio numerator; 0 until loaded —
int32_t Settings::AspecDen = 0;                // RoiMessage divides by AspecNum, so these must be set first
int32_t Settings::OneFrameDuration = 50;       // NOTE(review): units unclear — 50 ms/frame would imply 20 fps, yet FrameRate=50
int32_t Settings::SDIOneFrameDuration = 50;
int32_t Settings::RecordStoreDuration = 10000;
int32_t Settings::RecordFlag = 0;
int32_t Settings::OpenOMP = 1;                 // enable OpenMP code paths (0/1)
int32_t Settings::TimeoutFrames = 50;
int32_t Settings::SdiOutWaitNums = 100;        // NOTE(review): near-duplicate of SDIOutputWaitNums below — confirm which one is read
bool Settings::HaveBlackDataFlag = false;      // when true, WorkCutImage letterboxes to the configured aspect
int32_t Settings::DrawFlag = 0;
int32_t Settings::ZoomFlag = 0;
float Settings::ZoomScale = 1.0;
int32_t Settings::ZoomScaleN = 1;              // zoom scale expressed as rational N/D
int32_t Settings::ZoomScaleD = 2;
int32_t Settings::ZoomInDuration = 0;
int32_t Settings::ZoomOutDuration = 0;
int32_t Settings::ZoomMoveDuration = 0;
int32_t Settings::ZoomUseOmp = 1;
int32_t Settings::ZoomScaleType = 3;
int32_t Settings::ZoomScaleFilter = 4;
uint32_t Settings::ZoomInWaitCnt = 0;
uint32_t Settings::ZoomMoveWaitCnt = 0;
uint32_t Settings::ZoomOutWaitCnt = 0;
uint32_t Settings::ReplayForward = 0;          // extra lead-in subtracted from replay start_time (see ReplayThread)
uint32_t Settings::ReplayDeley = 0;            // extra tail added to replay end_time
uint32_t Settings::SDIOutputWaitNums = 0;
int32_t Settings::CropFlag = 0;
int32_t Settings::CropX = 0;
int32_t Settings::CropY = 0;
int32_t Settings::CropDirection = 1;
int32_t Settings::UIUdpPort = 8100;            // UDP port for status messages to the UI
QString Settings::UIIpAddr = "127.0.0.1";      // UI host address
\ No newline at end of file
......@@ -48,10 +48,17 @@ int main(int argc, char *argv[])
tmpFlag |= _CRTDBG_LEAK_CHECK_DF;
_CrtSetDbgFlag(tmpFlag);
HANDLE hMutex = CreateMutex(NULL, TRUE, L"FigureOut");
if (GetLastError() == ERROR_ALREADY_EXISTS)
{
CloseHandle(hMutex);//
return 0;
}
CMiniDumper dumper(false);
log_fp = fopen("MomentaMedia.log", "w");
log_fp = fopen("FigureOut.log", "w");
OrbitScope("MomentaMedia");
OrbitScope("FigureOut");
qInstallMessageHandler(myMessageOutput);
QApplication a(argc, argv);
......@@ -74,6 +81,9 @@ int main(int argc, char *argv[])
qRegisterMetaType<MaskBuffer>("MaskBuffer");
qRegisterMetaType<MaskBuffer>("MaskBuffer&"); //
qRegisterMetaType<std::shared_ptr<MaskBuffer>>("std::shared_ptr<MaskBuffer>");
qRegisterMetaType<SportAttribute>("SportAttribute");
qRegisterMetaType<SportAttribute>("SportAttribute&"); //
qRegisterMetaType<std::shared_ptr<SportAttribute>>("std::shared_ptr<SportAttribute>");
qRegisterMetaType<ReplayParams>("ReplayParams");
qRegisterMetaType<ReplayParams>("ReplayParams&");
/*FILE* fp = fopen("D:/png/1.txt", "rb");
......
No preview for this file type
No preview for this file type
objct name changed "deviceOutputPage3"
objct name changed "deviceOutputPage4"
available device "DeckLink 8K Pro (1)"
available device "DeckLink 8K Pro (2)"
available device "DeckLink 8K Pro (3)"
available device "DeckLink 8K Pro (4)"
"2024-05-16 14:30:21.752" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:30:21.811" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:30:21.831" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:30:21.851" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:30:21.871" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:30:21.891" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:30:21.911" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:30:21.931" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:30:21.989" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-16 14:30:23.034" decklink input fps 51
......@@ -23,12 +23,32 @@ BLACK_BOTTOM_HEIGHT_1=16
BLACK_BOTTOM_HEIGHT_2=20
BLACK_BOTTOM_HEIGHT_3=20
BLACK_BOTTOM_HEIGHT_4=30
ZOOM_DRAW=0
ZOOM_FLAG=0
ZOOM_SCALE=1.5
ZOOM_SCALE_N=1
ZOOM_SCALE_D=2
ZOOM_IN_D=2
ZOOM_MOVE_D=2
ZOOM_OUT_D=2
ZOOM_IN_WAIT=15
ZOOM_MOVE_WAIT=15
ZOOM_OUT_WAIT=15
ZOOM_USE_OMP=1
ZOOM_SCALE_TYPE=2
ZOOM_SCALE_FILTER=3
NDIONEFRAMEDURATION=50
SDIONEFRAMEDURATION=50
RECORDFLAG=1
OPENOMP=0
OPENOMP=1
TIMEOUTFRAMES=250
DELEYSDINUMS=0
REPLAY_START_FORWARD=50
REPLAY_END_DELEY=50
SDIOUTPUTWAITNUMS=5
CROPRECORD=0
CROP_X=400
CROP_Y=1800
CROP_DIRECTION=3
UI_UDP_PORT=8100
UI_IP_ADDR=192.168.31.83
......@@ -23,12 +23,32 @@ BLACK_BOTTOM_HEIGHT_1=16
BLACK_BOTTOM_HEIGHT_2=20
BLACK_BOTTOM_HEIGHT_3=20
BLACK_BOTTOM_HEIGHT_4=30
ZOOM_DRAW=0
ZOOM_FLAG=0
ZOOM_SCALE=1.5
ZOOM_SCALE_N=1
ZOOM_SCALE_D=2
ZOOM_IN_D=2
ZOOM_MOVE_D=2
ZOOM_OUT_D=2
ZOOM_IN_WAIT=15
ZOOM_MOVE_WAIT=15
ZOOM_OUT_WAIT=15
ZOOM_USE_OMP=1
ZOOM_SCALE_TYPE=2
ZOOM_SCALE_FILTER=3
NDIONEFRAMEDURATION=50
SDIONEFRAMEDURATION=50
RECORDFLAG=1
OPENOMP=0
OPENOMP=1
TIMEOUTFRAMES=250
DELEYSDINUMS=0
REPLAY_START_FORWARD=50
REPLAY_END_DELEY=50
SDIOUTPUTWAITNUMS=5
CROPRECORD=0
CROP_X=400
CROP_Y=1800
CROP_DIRECTION=3
UI_UDP_PORT=8100
UI_IP_ADDR=127.0.0.1
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment