Commit e90327f6 by wangguotao

1.支持裁切 2.动出贴图 3.replay解决操作人员时长 4.其他优化

parent 345d99bc
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -114,8 +114,8 @@
<widget class="QCheckBox" name="deviceLabelCheckBox">
<property name="geometry">
<rect>
<x>10</x>
<y>40</y>
<x>250</x>
<y>10</y>
<width>171</width>
<height>20</height>
</rect>
......@@ -127,8 +127,8 @@
<widget class="QCheckBox" name="timecodeCheckBox">
<property name="geometry">
<rect>
<x>10</x>
<y>50</y>
<x>160</x>
<y>10</y>
<width>87</width>
<height>20</height>
</rect>
......@@ -286,6 +286,34 @@
<number>34</number>
</property>
</widget>
<widget class="QLineEdit" name="showPathEdit">
<property name="geometry">
<rect>
<x>10</x>
<y>60</y>
<width>240</width>
<height>31</height>
</rect>
</property>
</widget>
<widget class="QPushButton" name="openFile">
<property name="geometry">
<rect>
<x>280</x>
<y>60</y>
<width>80</width>
<height>31</height>
</rect>
</property>
<property name="font">
<font>
<pointsize>8</pointsize>
</font>
</property>
<property name="text">
<string>打开文件</string>
</property>
</widget>
</widget>
</widget>
</widget>
......
......@@ -65,7 +65,7 @@
<ClCompile>
<AdditionalIncludeDirectories>.\ThirdParty\stb_image\;.\ThirdParty\rabbitmq\include;.\ThirdParty\ffmpeg-master-latest-win64-gpl-shared\include;.\ThirdParty\libyuv\include;.\ThirdParty\OpenCV\include;.\ThirdParty\NewTek\include;.\ThirdParty\BlackmagicDesign\include;.\include;%(AdditionalIncludeDirectories);$(Qt_INCLUDEPATH_)</AdditionalIncludeDirectories>
<ShowIncludes>false</ShowIncludes>
<PreprocessorDefinitions>WIN32;_WINSOCKAPI_;AMQP_STATIC;HAVE_CONFIG_H;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<PreprocessorDefinitions>WIN32;_WINSOCKAPI_;HAVE_CONFIG_H;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<OpenMPSupport>true</OpenMPSupport>
</ClCompile>
<Link>
......@@ -76,7 +76,7 @@
<ItemDefinitionGroup Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<ClCompile>
<AdditionalIncludeDirectories>.\ThirdParty\stb_image\;.\ThirdParty\rabbitmq\include;.\ThirdParty\ffmpeg-master-latest-win64-gpl-shared\include;.\ThirdParty\libyuv\include;.\ThirdParty\OpenCV\include;.\ThirdParty\NewTek\include;.\ThirdParty\BlackmagicDesign\include;.\include;%(AdditionalIncludeDirectories);$(Qt_INCLUDEPATH_)</AdditionalIncludeDirectories>
<PreprocessorDefinitions>WIN32;_WINSOCKAPI_;AMQP_STATIC;HAVE_CONFIG_H;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<PreprocessorDefinitions>WIN32;_WINSOCKAPI_;HAVE_CONFIG_H;%(PreprocessorDefinitions)</PreprocessorDefinitions>
<OpenMPSupport>true</OpenMPSupport>
<ConformanceMode>Default</ConformanceMode>
<IntrinsicFunctions>true</IntrinsicFunctions>
......@@ -136,13 +136,17 @@
<QtMoc Include="include\Threads\ReplayThread.h" />
<QtMoc Include="include\Network\UdpServer.h" />
<QtMoc Include="include\Network\UdpSend.h" />
<QtMoc Include="include\BlackMagicDesign\OpenFile.h" />
<ClInclude Include="include\Record\Record.h" />
<ClInclude Include="include\Record\RecordStore.h" />
<ClInclude Include="include\Record\RecordThread.h" />
<QtMoc Include="include\Threads\ZoomThread.h" />
<QtMoc Include="include\Threads\DecodeMaskThread.h" />
<QtMoc Include="include\Threads\CropThread.h" />
<ClInclude Include="include\Utils\Algorithm.h" />
<ClInclude Include="include\Utils\AudioConvert.h" />
<ClInclude Include="include\Utils\Base64.h" />
<ClInclude Include="include\Utils\Computer.h" />
<ClInclude Include="include\Utils\FastMemcpy.h" />
<ClInclude Include="include\Utils\FastMemcpy_Avx.h" />
<ClInclude Include="include\Utils\MaskBuffer.h" />
......@@ -223,6 +227,7 @@
<ClCompile Include="src\BlackMagicDesign\DeckLinkPreviewOverlay.cpp" />
<ClCompile Include="src\BlackMagicDesign\DeckLinkInputVideoFrame.cpp" />
<ClCompile Include="src\BlackMagicDesign\DeckLinkPreviewVideoFrame.cpp" />
<ClCompile Include="src\BlackMagicDesign\OpenFile.cpp" />
<ClCompile Include="src\BlackMagicDesign\ProfileCallback.cpp" />
<ClCompile Include="src\BlackMagicDesign\ScreenPreviewCallback.cpp" />
<ClCompile Include="src\main.cpp" />
......@@ -232,6 +237,8 @@
<ClCompile Include="src\Record\RecordStore.cpp" />
<ClCompile Include="src\Record\RecordThread.cpp" />
<ClCompile Include="src\Threads\ConsumerMqThread.cpp" />
<ClCompile Include="src\Threads\CropThread.cpp" />
<ClCompile Include="src\Threads\DecodeMaskThread.cpp" />
<ClCompile Include="src\Threads\ProcessMaskThread.cpp" />
<ClCompile Include="src\Threads\ReplayThread.cpp" />
<ClCompile Include="src\Threads\ZoomThread.cpp" />
......@@ -247,6 +254,7 @@
<ClCompile Include="src\Utils\AudioPacket.cpp" />
<ClCompile Include="src\Utils\AVBuffer.cpp" />
<ClCompile Include="src\Utils\Common.cpp" />
<ClCompile Include="src\Utils\Computer.cpp" />
<ClCompile Include="src\Utils\Image.cpp" />
<ClCompile Include="src\Utils\LatencyStatistics.cpp" />
<ClCompile Include="src\Utils\MiniDumper.cpp" />
......
......@@ -131,6 +131,15 @@
<QtMoc Include="include\Network\UdpSend.h">
<Filter>Header Files\Network</Filter>
</QtMoc>
<QtMoc Include="include\Threads\DecodeMaskThread.h">
<Filter>Header Files\Threads</Filter>
</QtMoc>
<QtMoc Include="include\Threads\CropThread.h">
<Filter>Header Files\Threads</Filter>
</QtMoc>
<QtMoc Include="include\BlackMagicDesign\OpenFile.h">
<Filter>Header Files\BlackMagicDesign</Filter>
</QtMoc>
</ItemGroup>
<ItemGroup>
<ClInclude Include="include\stdafx.h">
......@@ -304,6 +313,9 @@
<ClInclude Include="include\Utils\FastMemcpy.h">
<Filter>Header Files\Utils</Filter>
</ClInclude>
<ClInclude Include="include\Utils\Computer.h">
<Filter>Header Files\Utils</Filter>
</ClInclude>
</ItemGroup>
<ItemGroup>
<QtRcc Include="Form\MomentaMedia.qrc">
......@@ -430,6 +442,18 @@
<ClCompile Include="src\Network\UdpSend.cpp">
<Filter>Source Files\Network</Filter>
</ClCompile>
<ClCompile Include="src\Threads\DecodeMaskThread.cpp">
<Filter>Source Files\Threads</Filter>
</ClCompile>
<ClCompile Include="src\Utils\Computer.cpp">
<Filter>Source Files\Utils</Filter>
</ClCompile>
<ClCompile Include="src\Threads\CropThread.cpp">
<Filter>Source Files\Threads</Filter>
</ClCompile>
<ClCompile Include="src\BlackMagicDesign\OpenFile.cpp">
<Filter>Source Files\BlackMagicDesign</Filter>
</ClCompile>
</ItemGroup>
<ItemGroup>
<QtUic Include="Form\TimePlus.ui">
......
......@@ -5,9 +5,9 @@
<DebuggerFlavor>WindowsLocalDebugger</DebuggerFlavor>
</PropertyGroup>
<PropertyGroup Label="QtSettings" Condition="'$(Configuration)|$(Platform)'=='Debug|x64'">
<QtLastBackgroundBuild>2023-11-23T07:42:34.1228918Z</QtLastBackgroundBuild>
<QtLastBackgroundBuild>2024-05-28T05:38:18.7905221Z</QtLastBackgroundBuild>
</PropertyGroup>
<PropertyGroup Label="QtSettings" Condition="'$(Configuration)|$(Platform)'=='Release|x64'">
<QtLastBackgroundBuild>2023-04-20T07:02:25.8671702Z</QtLastBackgroundBuild>
<QtLastBackgroundBuild>2024-05-28T05:38:18.8635402Z</QtLastBackgroundBuild>
</PropertyGroup>
</Project>
\ No newline at end of file
......@@ -101,4 +101,6 @@ private:
int Index;
qint32 PrewState;
Listener* listener_{ NULL };
QThread* udp_thread{NULL};
};
\ No newline at end of file
......@@ -60,10 +60,13 @@ public:
ComPtr<IDeckLinkOutput> getDeckLinkOutput(void) const { return deckLinkOutput; }
void SetDeleyTime(qint32& deleyTime);
void SetSendSdiParams(SendSdiParams params);
void ClearFrontQueue();
void AddPicFrame(std::shared_ptr<VideoFrameWithMask> frame);
public slots:
void AddAudioFrame(std::shared_ptr<AudioPacket> audio_packet);
void AddFrame(std::shared_ptr<Image> image);
void AddVideoFrameMask(std::shared_ptr<VideoFrameWithMask> frame);
void AddFrontVideoFrame(std::shared_ptr<VideoFrameWithMask> frame);
void AddZoomFrame(std::shared_ptr<VideoFrameWithMask> frame);
signals:
......@@ -99,6 +102,8 @@ private:
SampleQueue<std::shared_ptr<VideoFrameWithMask>> outputMaskVideoFrameQueue;
SampleDeque<std::shared_ptr<VideoFrameWithMask>> outputMaskVideoFrameDeque;
SampleQueue<std::shared_ptr<VideoFrameWithMask>> zoomVideoFrameQueue;
SampleQueue<std::shared_ptr<VideoFrameWithMask>> FrontMaskVideoFrameQueue;
SampleQueue<std::shared_ptr<VideoFrameWithMask>> PicFrameQueue;
//ScheduledFramesList scheduledFramesList;
//
uint32_t videoPrerollSize;
......@@ -152,6 +157,9 @@ private:
qint64 send_sdi_start_time{0};
std::mutex sdi_clear_mutex;
std::atomic_bool send_zoom_frame_flag{ false };
std::atomic_bool recv_end_back_flag{false};
uint32_t recv_back_num{0};
qint64 last_start_tm{0};
SafeMap<qint64, std::shared_ptr<VideoFrameWithMask>> output_video_frame_map;
};
......@@ -13,6 +13,7 @@
#include "Threads/ProcessThread.h"
#include "Threads/ProcessMaskThread.h"
#include "Threads/ConsumerMqThread.h"
#include "Threads/DecodeMaskThread.h"
class DeckLinkOutputPage : public QWidget,
......@@ -70,12 +71,13 @@ public slots:
void BlackHeightFinish();
void VerifyClicked();
void PrewChanged(int state);
void ForwardPicData(std::shared_ptr<VideoFrameWithMask> frame);
signals:
void RequestDeckLink(ComPtr<IDeckLink>& device);
void RequestDeckLinkIfAvailable(ComPtr<IDeckLink>& device);
void RelinquishDeckLink(ComPtr<IDeckLink>& device);
void PushMask(std::shared_ptr<MaskBuffer> buffer);
//void PushMask(std::shared_ptr<MaskBuffer> buffer);
void PushAttribute(std::shared_ptr<SportAttribute> attr);
private:
void RestartOutput(void);
......@@ -90,8 +92,9 @@ private:
std::shared_ptr<ZoomThread> Zoom;
qint32 Index;
std::unique_ptr<NDIOutputThread> NDIOutput;
std::shared_ptr<ConsumerMqThread> MqThread;
std::unique_ptr<NDIOutputThread> NDIOutput{nullptr};
std::shared_ptr<ConsumerMqThread> MqThread{ nullptr };
std::unique_ptr<DecodeMaskThread> DecodeThread{ nullptr };
ComPtr<DeckLinkOutputDevice> SelectedDevice;
DeckLinkOpenGLWidget* PreviewView;
......
#pragma once
#include <qobject.h>
#include "Utils/Common.h"

// Worker QObject that opens a picture file and forwards its path and
// decoded contents. Intended to be moved onto a dedicated QThread and
// driven through queued signal/slot connections so file I/O stays off
// the UI thread. NOTE(review): the actual dialog/decoding behaviour
// lives in OpenFile.cpp, which is not visible here — confirm there.
class COpenFile : public QObject
{
Q_OBJECT
public:
COpenFile(QObject* parent = nullptr);
~COpenFile();
public slots:
// Entry point: performs the open-file operation.
void doOpenFile();
signals:
// Emitted with the selected file's path.
void tellFilePath(const QString& path);
// Emitted with the loaded picture wrapped as a VideoFrameWithMask.
void SendPicData(std::shared_ptr<VideoFrameWithMask> frame);
private:
// Most recently selected file path.
QString path;
};
......@@ -61,6 +61,7 @@ private:
SampleQueue<std::shared_ptr<videoFrameData>> VideoQueue;
SampleQueue<std::shared_ptr<videoFrameData>> VideoFilterQueue;
SampleQueue<std::shared_ptr<VideoFrameWithMask>> VideoMaskQueue;
SampleQueue<std::shared_ptr<VideoFrameWithMask>> VideoMaskBackQueue;
SampleQueue<std::shared_ptr<AudioPacket>> audioQueue;
//std::queue<ComPtr<IDeckLinkVideoInputFrame>> taskQueue;
SampleQueue<std::shared_ptr<Image>> taskQueue;
......@@ -93,4 +94,6 @@ private:
bool scale_to_hd;
qint64 normal_last_timestamp{0};
uint64_t last_recv_ts{ 0 };
qint32 recv_back_num{0};
bool recv_back_flag{false};
};
\ No newline at end of file
......@@ -7,9 +7,10 @@
extern "C" {
#include "amqp_time.h"
}
#include "amqp.h"
#include "rabbitmq-c/tcp_socket.h"
}
//#ifdef _WIN32
//#include <WinSock2.h>
......@@ -38,6 +39,7 @@ private:
bool setup_connection_and_channel();
bool close_and_destroy_connection();
void read_msg();
void read_consume_msg();
private:
std::string mq_queue_name;
std::string mq_exchange_name;
......
#pragma once
#include <QThread>
#include "Utils/SampleQueue.h"

// Worker thread that crops a region (described by a CropMessage) out of
// queued video frames and scales the result, delivering output frames
// through the Listener callback interface.
class CropThread : public QThread
{
Q_OBJECT
public:
// Callback interface implemented by the consumer of cropped frames
// (e.g. ProcessMaskThread or ReplayThread elsewhere in this project).
class Listener
{
public:
// Receives each cropped/scaled output frame.
// (Spelling "Viedo" kept as-is: existing overriders depend on it.)
virtual void OnRecvViedoFrame(std::shared_ptr<VideoFrameWithMask>) = 0;
// Notified when the active crop region changes.
virtual void OnChange() = 0;
};
public:
CropThread(Listener* listener, const CropMessage& crop_msg);
CropThread(Listener* listener);
~CropThread();
public:
// Queues one frame for cropping.
void addVideoFrame(std::shared_ptr<VideoFrameWithMask> frame);
// Queues a new crop region to apply to subsequent frames.
void addCropMsg(const CropMessage& msg);
// NOTE(review): presumably reports whether a re-crop is possible or
// pending — confirm against CropThread.cpp.
bool reCropRes();
protected:
// QThread entry point; drains crop_queue / crop_msg_queue.
void run();
private:
// Performs the actual crop + scale for one frame.
void CropScale();
private:
SampleQueue<std::shared_ptr<VideoFrameWithMask>> crop_queue; // frames awaiting cropping
SampleQueue<CropMessage> crop_msg_queue; // pending crop-region updates
CropMessage crop_msg; // currently active crop region
// Scratch buffers reused across frames; formats per their names
// (UYVY / planar I422) — allocation happens in the .cpp.
uint8_t* crop_data{ NULL };
uint8_t* i422_4k_data{ NULL };
uint8_t* uyvy_crop_data{ NULL };
uint8_t* i422_crop_data{ NULL };
Listener* p_listener{ NULL }; // receives cropped frames; not owned
};
\ No newline at end of file
#pragma once
#include <QThread>
#include "Utils/MaskBuffer.h"
#include "Utils/SampleQueue.h"

// Worker thread that decodes queued JSON mask messages into MaskBuffer
// objects and publishes each one via the PushMask signal.
class DecodeMaskThread : public QThread
{
Q_OBJECT
public:
DecodeMaskThread() {}
~DecodeMaskThread() {}
signals:
// Emitted once per decoded mask.
void PushMask(std::shared_ptr<MaskBuffer> buffer);
public:
// Queues one JSON mask message for decoding.
void addData(const QJsonObject& obj);
private:
// QThread entry point; drains `queue` and decodes each message.
void run() override;
private:
SampleQueue<QJsonObject> queue; // pending JSON mask messages
};
\ No newline at end of file
......@@ -7,8 +7,9 @@
#include "Record/RecordStore.h"
#include "ZoomThread.h"
#include "Utils/Algorithm.h"
#include "CropThread.h"
class ProcessMaskThread : public QThread
class ProcessMaskThread : public QThread, public CropThread::Listener
{
Q_OBJECT
public:
......@@ -20,11 +21,13 @@ public slots:
void AddFrame(std::shared_ptr<videoFrameData> frame_data);
signals:
void PushFrame(std::shared_ptr<VideoFrameWithMask> image);
void PushFrameToReplay(std::shared_ptr<VideoFrameWithMask> image);
public:
void ClearQueue();
void SetRecordStore(RecordStore* store) { p_store = store; }
void SetZoomThread(std::shared_ptr<ZoomThread> thread) { zoom_thread = thread; }
void StartRecord(const uint64_t& start_time, const uint64_t& end_time);
CropThread* GetCropThread();
protected:
enum MaskStatus
......@@ -34,6 +37,8 @@ protected:
MS_END
};
private:
void OnRecvViedoFrame(std::shared_ptr<VideoFrameWithMask>) override;
void OnChange() override;
void run() override;
void process();
void workMask(const std::shared_ptr<videoFrameData>& frame_data, std::shared_ptr<MaskBuffer> buffer,const bool& mask_flag);
......@@ -47,6 +52,7 @@ private:
void outputAlphaRect(std::shared_ptr<MaskedBuffer> current,const Rect& in_cross, Rect& out_cross);
void StartRecord_(const uint32_t& w,const uint32_t& h, const int32_t& fmt,const uint64_t& start_time,const uint64_t& end_time,const std::string& path);
void CropScale();
void CropScaleWithUYVY();
private:
//SampleQueue<MaskBuffer> mask_queue;
......@@ -55,7 +61,6 @@ private:
std::map<qint64, std::shared_ptr<MaskedBuffer>> tga_masked_map;
std::map<qint64, std::shared_ptr<MaskedBuffer>> store_masked_map;
SampleQueue<std::shared_ptr<videoFrameData>> taskImageQueue;
SampleQueue<std::shared_ptr<VideoFrameWithMask>> crop_queue;
bool once_save{ true };
uint8_t* tmp_bgra{NULL};
......@@ -74,6 +79,7 @@ private:
//SampleQueue<std::shared_ptr<MaskedBuffer>> masked_queue;
std::shared_ptr<ZoomThread> zoom_thread{nullptr};
std::shared_ptr<CropThread> crop_thread{ nullptr };
Rect mask_rect{0,0,0,0};
int m_fps{0};
......@@ -84,7 +90,14 @@ private:
qint64 start_time{0};
uint64_t last_recv_ts{0};
std::thread crop_thread;
/*std::thread crop_thread;
CropMessage crop_msg;
uint8_t* crop_data{NULL};
uint8_t* i422_4k_data{NULL};
uint8_t* uyvy_crop_data{ NULL };
uint8_t* i422_crop_data{ NULL };
SampleQueue<std::shared_ptr<VideoFrameWithMask>> crop_queue;*/
CropMessage crop_msg;
qint32 dynamic_out_num {0};
bool dynamic_flag{false};
};
\ No newline at end of file
......@@ -4,9 +4,10 @@
#include <QThread>
#include <mutex>
#include <queue>
#include <map>
#include "Utils/SafeMap.h"
#include "CropThread.h"
class ReplayThread : public QThread
class ReplayThread : public QThread,public CropThread::Listener
{
Q_OBJECT
public:
......@@ -14,29 +15,49 @@ public:
~ReplayThread();
public slots:
void addFrame(std::shared_ptr<videoFrameData> frameData);
void addCropFrame(std::shared_ptr<VideoFrameWithMask> frame);
signals:
//void PushFrame(std::shared_ptr<Image> image);
void PushFrame(std::shared_ptr<videoFrameData> frameData);
void PushFrameForSdi(std::shared_ptr<VideoFrameWithMask> frameData);
public:
bool CanReplay(const ReplayParams& params);
void recvReplayParams(const ReplayParams& params);
CropThread* GetCropThread();
protected:
void run() override;
private:
void OnRecvViedoFrame(std::shared_ptr<VideoFrameWithMask> crop_frame) override;
void OnChange() override;
void SendFrameFunc();
private:
std::mutex mutex;
std::condition_variable cv;
ReplayParams replay_params;
std::map<uint64_t, std::shared_ptr<videoFrameData>> storeVideoMap;
std::map<qint64, std::shared_ptr<videoFrameData>> storeVideoMap;
std::map<qint64, std::shared_ptr<videoFrameData>> storeBackVideoMap;
std::vector<std::shared_ptr<videoFrameData>> replayVideoVec;
std::vector<std::shared_ptr<VideoFrameWithMask>> replayVideoSdiVec;
std::vector<std::shared_ptr<videoFrameData>> replayVideoCropVec;
//std::vector<std::shared_ptr<VideoFrameWithMask>> replayCropVideoSdiVec;
/*std::queue<std::shared_ptr<videoFrameData>> replayVideoQueue1;
std::queue<std::shared_ptr<videoFrameData>> replayVideoQueue2;*/
uint32_t max_store_size;
uint32_t max_store_crop_size;
uint32_t max_store_back_size;
std::atomic_bool replay_flag{ false };
std::atomic<ReplayStatus> replay_status{ RS_IDEL };
uint32_t interval;
ReplayParams last_replay_params;
uint32_t replay_position{ 0 };
qint64 current_seq{0};
std::atomic_int32_t replay_position{ 0 };
std::atomic_int32_t replay_sdi_position{ 0 };
std::atomic_int64_t current_seq{0};
bool send_err_flag {false};
std::shared_ptr<CropThread> crop_thread{ nullptr };
SafeMap<qint64, std::shared_ptr<VideoFrameWithMask>> storeCropMap;
SampleQueue<std::shared_ptr<videoFrameData>> needCropQueue;
std::thread send_frame_thread;
};
\ No newline at end of file
......@@ -14,6 +14,7 @@
#include "BlackMagicDesign/ProfileCallback.h"
#include "DeckLinkAPI.h"
#include "BlackMagicDesign/OpenFile.h"
#include "ui_TimePlus.h"
......@@ -50,7 +51,9 @@ public slots:
void RequestOutputDevice(DeckLinkOutputPage* page, ComPtr<IDeckLink>& deckLink);
void RequestOutputDeviceIfAvailable(DeckLinkOutputPage* page, ComPtr<IDeckLink>& deckLink);
void RelinquishOutputDevice(ComPtr<IDeckLink>& device);
void OnWritePath(const QString& path);
signals:
void StartOpenFile();
private slots:
void DeviceLabelEnableChanged(bool enabled);
void TimecodeEnableChanged(bool enabled);
......@@ -59,8 +62,10 @@ private slots:
void OutputModeChanged(int selectIndex);
void PicAspectChanged(int selectIndex);
void AudioChannelChanged(int selectIndex);
void ShowFile();
private:
void ReadSettings();
void ReadPic(DeckLinkOutputPage*);
private:
Ui::MomentaMediaClass ui;
......@@ -69,6 +74,7 @@ private:
ComPtr<DeckLinkDeviceDiscovery> DeckLinkDiscovery;
ProfileCallback* pProfileCallback;
QLineEdit* PicPathEdit;
QLineEdit* DeleyTimeEdit;
QComboBox* AudioChannelCombo;
QComboBox* OutputModeCombo;
......@@ -82,4 +88,6 @@ private:
std::array<std::shared_ptr<ProcessMaskThread>, kPreviewDevicesCount> ProcessMaskThreads;
std::map<ComPtr<IDeckLink>, DeviceState> InputDevices;
std::map<ComPtr<IDeckLink>, DeviceState> OutputDevices;
QThread* file_thread;
COpenFile* open_file{ NULL };
};
......@@ -76,7 +76,8 @@ public:
unsigned char char_array_4[4], char_array_3[3];
std::string ret;
while (in_len-- && (encoded_string[in_] != '=') && is_base64(encoded_string[in_])) {
while (in_len-- && (encoded_string[in_] != '=') && is_base64(encoded_string[in_]))
{
char_array_4[i++] = encoded_string[in_]; in_++;
if (i == 4) {
for (i = 0; i < 4; i++)
......
......@@ -43,10 +43,12 @@ enum ReplayStatus
enum BlendStatus
{
BS_IDEL = -1,
BS_ZOOM,
BS_RP_START_FRONT,
BS_START = 1,
BS_ING,
BS_END
BS_END,
BS_RP_END_BACK,
BS_ZOOM,
};
enum FileType
......@@ -93,8 +95,8 @@ static uint64_t TimeMilliSecond()
typedef struct Point
{
uint32_t x;
uint32_t y;
qint32 x;
qint32 y;
}Point;
struct SendSdiParams
......@@ -123,6 +125,148 @@ struct HDRMetadata {
enum class EOTF { SDR = 0, HDR = 1, PQ = 2, HLG = 3 };
// A 16:9-constrained crop rectangle inside a 4K (K4WIDTH x K4HEIGHT)
// frame. crop_x is kept even and crop_w a multiple of 4 so the region
// stays aligned with UYVY/I422 chroma subsampling.
class CropMessage
{
public:
	// Default: the whole 4K frame, anchored top-left.
	CropMessage() :crop_x(0), crop_y(0), crop_w(K4WIDTH), crop_h(K4HEIGHT), crop_direction(CD_LEFT_TOP) {}

	// Explicit rectangle. x is rounded down to an even value and w down
	// to a multiple of 4; y and h are taken as given.
	CropMessage(const int32_t& x, const int32_t& y, const int32_t& w, const int32_t& h) :crop_x(x), crop_y(y), crop_w(w), crop_h(h), crop_direction(CD_LEFT_TOP)
	{
		crop_x = (crop_x >> 1 << 1); // even x (UYVY macropixel alignment)
		crop_w = (crop_w >> 2 << 2); // width multiple of 4
	}

	// Anchor-point form: given a corner position and which frame corner
	// it is measured from, derive the largest 16:9 rectangle that fits,
	// then shift x/y so the rectangle hugs that corner.
	CropMessage(const int32_t& x, const int32_t& y, const CropDirection& direction)
	{
		crop_x = x;
		crop_y = y;
		uint32_t crop_max_w = 0;
		uint32_t crop_max_h = 0;
		crop_direction = direction;
		// Maximum extent available from the anchor corner.
		switch (crop_direction)
		{
		case CD_LEFT_TOP:
			crop_max_w = K4WIDTH - crop_x;
			crop_max_h = K4HEIGHT - crop_y;
			break;
		case CD_RIGHT_TOP:
			crop_max_w = crop_x;
			crop_max_h = K4HEIGHT - crop_y;
			break;
		case CD_LEFT_LOW:
			crop_max_w = K4WIDTH - crop_x;
			crop_max_h = crop_y;
			break;
		case CD_RIGHT_LOW:
			crop_max_w = crop_x;
			crop_max_h = crop_y;
			break;
		default:
			break;
		}

		// Fit the largest 16:9 rectangle inside (crop_max_w, crop_max_h):
		// start width-limited, fall back to height-limited.
		crop_w = crop_max_w;
		crop_h = crop_w * 9 / 16;
		if (crop_h > crop_max_h)
		{
			crop_h = crop_max_h;
			crop_w = crop_h * 16 / 9;
		}

		// Re-anchor the origin for corners other than top-left.
		switch (crop_direction)
		{
		case CD_LEFT_TOP:
			break;
		case CD_RIGHT_TOP:
			crop_x = crop_max_w - crop_w;
			break;
		case CD_LEFT_LOW:
			crop_y = crop_max_h - crop_h;
			break;
		case CD_RIGHT_LOW:
			crop_x = crop_max_w - crop_w;
			crop_y = crop_max_h - crop_h;
			break;
		default:
			break;
		}

		crop_x = (crop_x >> 1 << 1); // even x
		crop_w = (crop_w >> 2 << 2); // width multiple of 4
	}

	// All members are trivially copyable, so the compiler-generated
	// copy/move operations are exactly equivalent to the hand-written
	// ones they replace. The previous copy and move constructors were
	// also declared with the ill-formed extra qualification
	// "CropMessage::CropMessage(...)" inside the class body (MSVC C4596),
	// which standard C++ rejects.
	CropMessage(const CropMessage& other) = default;
	CropMessage(CropMessage&& other) = default;
	CropMessage& operator=(const CropMessage& other) = default;
	CropMessage& operator=(CropMessage&& other) = default;

	// True when both messages describe the same rectangle
	// (the anchor direction is deliberately ignored).
	bool equal(const CropMessage& other)
	{
		return other.crop_x == crop_x && other.crop_y == crop_y
			&& other.crop_w == crop_w && other.crop_h == crop_h;
	}

	// True when the rectangle fits inside the 4K frame starting at
	// (crop_x, crop_y).
	bool check()
	{
		int32_t crop_max_w = K4WIDTH - crop_x;
		int32_t crop_max_h = K4HEIGHT - crop_y;
		// Guard first: previously a negative remainder was converted to a
		// huge unsigned value in the comparison below, so an origin
		// outside the frame wrongly passed the check.
		if (crop_max_w < 0 || crop_max_h < 0) return false;
		if (crop_w > static_cast<uint32_t>(crop_max_w) || crop_h > static_cast<uint32_t>(crop_max_h)) return false;
		return true;
	}

	// True when the region differs from the full 4K frame, i.e. cropping
	// actually has an effect.
	bool checkCrop()
	{
		return !(0 == crop_x && 0 == crop_y && crop_w == K4WIDTH && K4HEIGHT == crop_h);
	}

	int32_t crop_x;               // left edge (kept even)
	int32_t crop_y;               // top edge
	uint32_t crop_w;              // width (kept a multiple of 4)
	uint32_t crop_h;              // height
	CropDirection crop_direction; // which frame corner the anchor refers to
};
typedef struct videoFrameData
{
videoFrameData() {}
......@@ -196,6 +340,7 @@ typedef struct videoFrameData
bool zoom_last = false;
bool repeat = false;
BMDPixelFormat fmt{ bmdFormatUnspecified };
BlendStatus flag_ = BS_IDEL;
qint32 width{ 0 };
qint32 height{ 0 };
qint32 size{ 0 };
......@@ -217,6 +362,13 @@ typedef struct VideoFrameWithMask
VideoFrameWithMask() {}
VideoFrameWithMask(const uint32_t& width, const uint32_t& height, uint8_t* data, const BMDPixelFormat& fmt, const BlendStatus& status, const HDRMetadata& meta) :
width_(width), height_(height), data_(data), fmt_(fmt), flag_(status), meta_(meta)
{
if (fmt_ == bmdFormat8BitBGRA) size_ = width_ * height_ << 2;
else if (fmt_ == bmdFormat8BitYUV) size_ = width_ * height_ << 1;
}
VideoFrameWithMask(const uint32_t& width, const uint32_t& height, const qint64& ts, const qint64& seq, uint8_t* data, const BlendStatus& status, const HDRMetadata& meta):
width_(width),height_(height),timestamp_(ts),sequenceNum_(seq),data_(data),flag_(status),meta_(meta)
{
......@@ -229,6 +381,13 @@ typedef struct VideoFrameWithMask
size_ = width_ * height_ << 2;
}
VideoFrameWithMask(const uint32_t& width, const uint32_t& height, const qint64& ts, const qint64& seq, uint8_t* data, const BMDPixelFormat& fmt, const BlendStatus& status, const HDRMetadata& meta) :
width_(width), height_(height), timestamp_(ts), sequenceNum_(seq), data_(data), fmt_(fmt), flag_(status), meta_(meta)
{
if (fmt_ == bmdFormat8BitBGRA) size_ = width_ * height_ << 2;
else if (fmt_ == bmdFormat8BitYUV) size_ = width_ * height_ << 1;
}
VideoFrameWithMask(const uint32_t& width, const uint32_t& height, const qint64& ts, const qint64& seq, uint8_t* data, const BMDPixelFormat& fmt, const BlendStatus& status, const HDRMetadata& meta, const qint64& start_tm) :
width_(width), height_(height), timestamp_(ts), sequenceNum_(seq), data_(data),fmt_(fmt), flag_(status),meta_(meta), start_tm_(start_tm)
{
......@@ -321,6 +480,7 @@ typedef struct VideoFrameWithMask
bool zoom_last = false;
bool mask_flag = true;
bool replay_flag = false;
uint8_t* data_ = NULL;
std::shared_ptr<videoFrameData> pImage = nullptr;
BMDPixelFormat fmt_ = bmdFormat8BitBGRA;
......@@ -332,96 +492,42 @@ typedef struct VideoFrameWithMask
qint64 timestamp_;
qint64 start_tm_ = 0;
HDRMetadata meta_;
CropMessage crop_msg;
}VideoFrameWithMask;
class CropMessage
struct PicFrameData
{
public:
CropMessage() :crop_x(0), crop_y(0), crop_w(0), crop_h(0) {}
CropMessage(const int32_t& x,const int32_t& y,const CropDirection& direction)
{
crop_x = x;
crop_y = y;
uint32_t crop_max_w = 0;
uint32_t crop_max_h = 0;
crop_direction = direction;
switch (crop_direction)
{
case CD_LEFT_TOP:
crop_max_w = K4WIDTH - crop_x;
crop_max_h = K4HEIGHT - crop_y;
break;
case CD_RIGHT_TOP:
crop_max_w = crop_x;
crop_max_h = K4HEIGHT - crop_y;
break;
case CD_LEFT_LOW:
crop_max_w = K4WIDTH - crop_x;
crop_max_h = crop_y;
break;
case CD_RIGHT_LOW:
crop_max_w = crop_x;
crop_max_h = crop_y;
break;
default:
break;
}
using PicFramePtr = std::shared_ptr<PicFrameData>;
crop_w = crop_max_w;
crop_h = crop_w * 9 / 16;
if (crop_h > crop_max_h)
PicFrameData(const qint32& width,const qint32& height,const BMDPixelFormat& fmt)
{
fmt_ = fmt;
width_ = width;
height_ = height;
if (fmt_ == bmdFormat8BitYUV)
{
crop_h = crop_max_h;
crop_w = crop_h * 16 / 9;
size_ = (width_ * height_ << 1);
}
switch (crop_direction)
else if (fmt_ == bmdFormat8BitBGRA || fmt_ == bmdFormat8BitARGB)
{
case CD_LEFT_TOP:
break;
case CD_RIGHT_TOP:
crop_x = crop_max_w - crop_w;
break;
case CD_LEFT_LOW:
crop_y = crop_max_h - crop_h;
break;
case CD_RIGHT_LOW:
crop_x = crop_max_w - crop_w;
crop_y = crop_max_h - crop_h;
break;
default:
break;
size_ = (width_ * height_ << 2);
}
data_ = new uint8_t[size_];
}
CropMessage& operator=(const CropMessage& other)
~PicFrameData()
{
crop_x = other.crop_x;
crop_y = other.crop_y;
crop_w = other.crop_w;
crop_h = other.crop_h;
crop_direction = other.crop_direction;
return *this;
if (data_) delete data_;
}
CropMessage& operator=(CropMessage&& other)
{
crop_x = other.crop_x;
crop_y = other.crop_y;
crop_w = other.crop_w;
crop_h = other.crop_h;
crop_direction = other.crop_direction;
return *this;
}
uint32_t crop_x;
uint32_t crop_y;
uint32_t crop_w;
uint32_t crop_h;
CropDirection crop_direction;
BMDPixelFormat fmt_{ bmdFormatUnspecified };
uint8_t* data_{ NULL };
qint32 width_{ 0 };
qint32 height_{ 0 };
uint32_t size_{ 0 };
};
static const HDRMetadata kDefaultHLGBT2020HDRMetadata = { static_cast<INT64>(EOTF::HLG), 0.708, 0.292, 0.170, 0.797, 0.131, 0.046, 0.3127, 0.3290, 1000.0, 0.0001, 1000.0, 50.0, bmdColorspaceRec2020 };
static const HDRMetadata kDefaultSDRBT709HDRMetadata = { static_cast<INT64>(EOTF::SDR), 0.64, 0.33, 0.3, 0.6, 0.15, 0.06, 0.3127, 0.329, 10, 0.0001, 1000.0, 30, bmdColorspaceRec709 };
......@@ -429,9 +535,11 @@ typedef struct ReplayParams
{
/*uint64_t timecode{0};
uint32_t durations{0};*/
ReplayStatus status{ RS_IDEL };
uint64_t start_time{0};
uint64_t end_time{ 0 };
ReplayParams() {}
std::atomic<ReplayStatus> status{ RS_IDEL };
qint64 start_time{0};
qint64 end_time{ 0 };
}ReplayParams;
......
#pragma once
#include <QString>

// Small host-information helper.
class Computer
{
public:
// Returns this machine's IPv4 address as text.
// NOTE(review): the selection policy on multi-homed hosts is defined in
// Computer.cpp, not visible here — confirm before relying on it.
static QString GetIPv4();
};
\ No newline at end of file
......@@ -124,8 +124,8 @@ struct MaskBuffer
Point upper_left_point;
Point lower_right_point;
uint32_t width{0};
uint32_t height{0};
qint32 width{0};
qint32 height{0};
int32_t signal{-1};//-1-- 0--start 1--stop
//QByteArray mask_data ;
std::string mask_data{""};
......
......@@ -51,6 +51,15 @@ public:
else return false;
}
// Removes the entry with the smallest key, if any.
// Returns true when an element was removed, false when the map was empty.
bool Pop()
{
	std::unique_lock<std::mutex> guard(mutex_);
	if (map_.empty())
	{
		return false;
	}
	// Erasing by iterator removes exactly the first (smallest-key) entry.
	map_.erase(map_.begin());
	return true;
}
bool Pop(K& key, V& value)
{
std::unique_lock<std::mutex> lock(mutex_);
......
......@@ -19,6 +19,7 @@ public:
void Push(T&& sample);
void PushBack(const T& sample);
bool Pop(T& sample);
bool Pop(int size);
bool Pop();
bool PopBack(T& sample);
bool PopBack();
......@@ -95,6 +96,17 @@ bool SampleDeque<T>::Pop(T& sample)
}
// Pops the front element only when the deque currently holds at least
// `size` elements; returns false otherwise.
//
// Fix: the original compared deque.size() (size_t) against the int
// parameter directly, relying on implicit signed/unsigned conversion.
// The comparison is now explicit; a negative `size` (unsatisfiable) is
// rejected up front, matching the original's accidental behaviour.
template<typename T>
bool SampleDeque<T>::Pop(int size)
{
	std::lock_guard<std::mutex> locker(mutex);
	if (size < 0)
		return false;
	if (deque.empty() || deque.size() < static_cast<size_t>(size))
		return false;
	deque.pop_front();
	return true;
}
template<typename T>
bool SampleDeque<T>::Pop()
{
std::lock_guard<std::mutex> locker(mutex);
......@@ -242,7 +254,7 @@ template <typename T>
bool SampleDeque<T>::Put(int size, std::vector<T>& vec)
{
std::lock_guard<std::mutex> locker(mutex);
if (deque.empty())
if (deque.empty() || !size)
return false;
int count = 0;
for (auto itor = deque.rbegin(); itor != deque.rend();itor++)
......
......@@ -18,8 +18,10 @@ public:
SampleQueue(const std::string& names);
virtual ~SampleQueue();
void PushOnly(const T& sample);
void Push(const T& sample);
void Push(T&& sample);
bool PopLast(T& sample);
bool Pop(T& sample);
bool Pop();
bool Front(T& sample);
......@@ -61,6 +63,20 @@ SampleQueue<T>::~SampleQueue()
}
// Replaces the queue contents with a single sample: every pending entry
// is discarded, `sample` becomes the only element, then all waiters are
// woken.
template <typename T>
void SampleQueue<T>::PushOnly(const T& sample)
{
	{
		std::lock_guard<std::mutex> hold(mutex);
		// Drain everything still pending so only the newest survives.
		for (; !queue.empty(); queue.pop())
			;
		queue.push(sample);
	}
	queueCondition.notify_all();
}
template <typename T>
void SampleQueue<T>::Push(T&& sample)
{
{
......@@ -82,6 +98,20 @@ void SampleQueue<T>::Push(const T& sample)
}
// Drains the queue, keeping only the most recent sample in `sample`.
// Returns false (leaving `sample` untouched) when the queue was empty.
template <typename T>
bool SampleQueue<T>::PopLast(T& sample)
{
	std::lock_guard<std::mutex> hold(mutex);
	if (queue.empty())
		return false;
	// Overwrite `sample` until the queue runs dry; the final assignment
	// is the newest element.
	while (!queue.empty())
	{
		sample = std::move(queue.front());
		queue.pop();
	}
	return true;
}
template <typename T>
bool SampleQueue<T>::Pop(T& sample)
......@@ -175,7 +205,7 @@ template <typename T>
bool SampleQueue<T>::WaitFor(int size)
{
std::unique_lock<std::mutex> locker(mutex);
queueCondition.wait(locker, [&] {return (!queue.empty() && queue.size()> size) || waitCancelled; });
queueCondition.wait(locker, [&] {return (!queue.empty() && queue.size() >= size) || waitCancelled; });
if (waitCancelled)
return false;
......
#pragma once
#include <iostream>
#include <QString>
//#include <iostream>
//#include <QString>
#include "Utils/Common.h"
class Settings
{
public:
static int32_t ReplayStoreTime ;//单位ms
static int32_t ReplayBackStoreTime;//ms
static int32_t ReplayCropStoreTime;//ms
static int32_t FrontDeleyTime ;//单位ms
static int32_t FrameRate;
static int32_t OutputPlayMode;
static int32_t AudioChannel;
static int32_t ScaleMode ;
//static int32_t ScaleMode ;
static int32_t AspecNum ;
static int32_t AspecDen ;
static int32_t OneFrameDuration ; //FOR NDI
static int32_t SDIOneFrameDuration; //FOR SDI
static std::atomic_int32_t NdiOneFrameDuration; //FOR NDI 静出多少帧数
static std::atomic_int32_t SDIOneFrameDuration; //FOR SDI 静出多少帧数
static std::atomic_int32_t DynamicOut;//DYNAMIC OUT 动出多少帧数
static int32_t MaxDynamicOut;
static int32_t MaxDynamicIn;
static int32_t MaxStaticFrame;
static int32_t RecordStoreDuration ;
static int32_t RecordFlag;
static int32_t OpenOMP ;
static int32_t TimeoutFrames ; //mask 贴图的超时时间 针对如果未收到算法给的结束帧情况使用
static int32_t SecondSdiOutWaitNums; //sdi输出的二次确认功能保留贴图开始的前多少帧
static int32_t SecondFlag;
static int32_t SecondSdiOutWaitNums;//sdi输出的二次确认等待的帧数 越大输出到sdi延迟越大
static int32_t SecondSdiOutWaitNumsFront; //sdi输出的二次确认功能保留贴图开始的前多少帧
static int32_t SecondSdiOutWaitNumsBack;//sdi输出的二次确认功能保留贴图结束后多少帧
static bool HaveBlackDataFlag;
static int32_t DrawFlag;
......@@ -37,17 +52,24 @@ public:
static uint32_t ZoomMoveWaitCnt;
static uint32_t ZoomOutWaitCnt;
static uint32_t ReplayForward;
static uint32_t ReplayDeley;
static std::atomic_int32_t ReplayForward;//replay 动进的帧数
static int32_t ReplayDeley;
static int32_t ReplayForwardForInNDI;
static uint32_t RTSDIOutputWaitNums; //实时输出的缓存帧大小 一般5帧
static int32_t CropFlag;
static std::atomic_bool CropFlag;
static int32_t CropX;
static int32_t CropY;
static int32_t CropDirection;
static int32_t UIUdpPort;
static QString UIIpAddr;
static std::atomic_bool UsePicFlag;
static QString PicPath;
static CropMessage CropMsg;
/*static uint8_t* PicData;
static std::atomic_bool PicFlag;*/
};
......
......@@ -159,7 +159,7 @@ public:
}
static void I422Scale4K(uint8_t* src, const size_t& src_w, const size_t& src_h, uint8_t* dst, const size_t& dst_w, const size_t& dst_h, const int32_t& omp)
static void I422Scale4K(uint8_t* src, const size_t& src_w, const size_t& src_h, uint8_t* dst, const size_t& dst_w, const size_t& dst_h, const int32_t& omp, int32_t filter = 2)
{
size_t half_src_w = (src_w >> 1);
size_t half_src_h = (src_h >> 1);
......@@ -169,6 +169,9 @@ public:
uint32_t src_y_size = src_w * src_h;
uint32_t src_u_size = (src_w * src_h >> 1);
size_t src_mode_h = src_h % 2;
size_t src_mode_w = src_w % 2;
uint8_t* src_y = src;
uint8_t* src_u = src_y + src_y_size;
uint8_t* src_v = src_u + src_u_size;
......@@ -211,6 +214,8 @@ public:
uint8_t* dst_v_3 = dst_v + (dst_w * half_dst_h >> 1);
uint8_t* dst_v_4 = dst_v + (half_dst_w >> 1) + (dst_w * half_dst_h >> 1);
std::vector<WH> src_wh_vec = { { half_src_w, half_src_h },{half_src_w + src_mode_w,half_src_h},{half_src_w,half_src_h + src_mode_h},{half_src_w + src_mode_w,half_src_h + src_mode_h} };
if (omp)
{
uint8_t* src_data[4][3] = { {src_y_1,src_u_1,src_v_1},{src_y_2,src_u_2,src_v_2},{src_y_3,src_u_3,src_v_3},{src_y_4,src_u_4,src_v_4} };
......@@ -231,22 +236,22 @@ public:
uint8_t* dst_u = dst_data[i][1];
uint8_t* dst_v = dst_data[i][2];
libyuv::I422Scale(src_y, src_w, src_u, src_w >> 1, src_v, src_w >> 1, half_src_w, half_src_h,
dst_y, dst_w, dst_u, dst_w >> 1, dst_v, dst_w >> 1, half_dst_w, half_dst_h, libyuv::kFilterBilinear);
libyuv::I422Scale(src_y, src_w, src_u, src_w >> 1, src_v, src_w >> 1, src_wh_vec[i].w, src_wh_vec[i].h,
dst_y, dst_w, dst_u, dst_w >> 1, dst_v, dst_w >> 1, half_dst_w, half_dst_h, (libyuv::FilterMode)filter);
}
}
}
else
{
{
libyuv::I422Scale(src_y_1, src_w, src_u_1, src_w >> 1, src_v_1, src_w >> 1, half_src_w, half_src_h,
dst_y_1, dst_w, dst_u_1, dst_w >> 1, dst_v_1, dst_w >> 1, half_dst_w, half_dst_h, libyuv::kFilterBilinear);
libyuv::I422Scale(src_y_2, src_w, src_u_2, src_w >> 1, src_v_2, src_w >> 1, half_src_w, half_src_h,
dst_y_2, dst_w, dst_u_2, dst_w >> 1, dst_v_2, dst_w >> 1, half_dst_w, half_dst_h, libyuv::kFilterBilinear);
libyuv::I422Scale(src_y_3, src_w, src_u_3, src_w >> 1, src_v_3, src_w >> 1, half_src_w, half_src_h,
dst_y_3, dst_w, dst_u_3, dst_w >> 1, dst_v_3, dst_w >> 1, half_dst_w, half_dst_h, libyuv::kFilterBilinear);
libyuv::I422Scale(src_y_4, src_w, src_u_4, src_w >> 1, src_v_4, src_w >> 1, half_src_w, half_src_h,
dst_y_4, dst_w, dst_u_4, dst_w >> 1, dst_v_4, dst_w >> 1, half_dst_w, half_dst_h, libyuv::kFilterBilinear);
libyuv::I422Scale(src_y_1, src_w, src_u_1, src_w >> 1, src_v_1, src_w >> 1, src_wh_vec[0].w, src_wh_vec[0].h,
dst_y_1, dst_w, dst_u_1, dst_w >> 1, dst_v_1, dst_w >> 1, half_dst_w, half_dst_h, (libyuv::FilterMode)filter);
libyuv::I422Scale(src_y_2, src_w, src_u_2, src_w >> 1, src_v_2, src_w >> 1, src_wh_vec[1].w, src_wh_vec[1].h,
dst_y_2, dst_w, dst_u_2, dst_w >> 1, dst_v_2, dst_w >> 1, half_dst_w, half_dst_h, (libyuv::FilterMode)filter);
libyuv::I422Scale(src_y_3, src_w, src_u_3, src_w >> 1, src_v_3, src_w >> 1, src_wh_vec[2].w, src_wh_vec[2].h,
dst_y_3, dst_w, dst_u_3, dst_w >> 1, dst_v_3, dst_w >> 1, half_dst_w, half_dst_h, (libyuv::FilterMode)filter);
libyuv::I422Scale(src_y_4, src_w, src_u_4, src_w >> 1, src_v_4, src_w >> 1, src_wh_vec[3].w, src_wh_vec[3].h,
dst_y_4, dst_w, dst_u_4, dst_w >> 1, dst_v_4, dst_w >> 1, half_dst_w, half_dst_h, (libyuv::FilterMode)filter);
}
}
......@@ -388,6 +393,131 @@ public:
}
// Convert a packed UYVY frame to planar I422 (Y plane followed by
// half-width U and V planes).
//
// The image is split into four quadrants so the conversion can run on
// four OpenMP threads when `omp` is non-zero; odd widths/heights give the
// extra column/row to the right/bottom quadrants (mode_w / mode_h).
//
// Fix: the serial path used to execute unconditionally — when `omp` was
// set, every quadrant was converted twice (once in the parallel section,
// once serially). The serial block is now an `else` branch, matching the
// structure of UYVYCopy and I422Scale4K.
//
// src    : packed UYVY pixels, 2 bytes per pixel (width*height*2 bytes).
// dst    : planar I422 output buffer (width*height*2 bytes).
// width  : frame width in pixels.
// height : frame height in pixels.
// omp    : non-zero -> convert the four quadrants on 4 OpenMP threads.
static void UYVYToI422(uint8_t* src, uint8_t* dst, const uint32_t& width, const uint32_t& height, const int32_t& omp = 0)
{
    size_t half_w = (width >> 1);
    size_t half_h = (height >> 1);
    size_t mode_w = width % 2;   // leftover column for odd widths
    size_t mode_h = height % 2;  // leftover row for odd heights
    // Source quadrant origins in the packed buffer (<< 1 == 2 bytes/pixel).
    uint8_t* src1 = src;
    size_t src_offset = (half_w << 1);
    uint8_t* src2 = src + src_offset;
    src_offset = (width * half_h << 1);
    uint8_t* src3 = src + src_offset;
    src_offset = ((width * half_h << 1) + (half_w << 1));
    uint8_t* src4 = src + src_offset;
    // Destination plane origins: Y is full-size, U/V are half-width.
    uint32_t dst_y_size = width * height;
    uint32_t dst_u_size = (width * height >> 1);
    uint8_t* dst_y = dst;
    uint8_t* dst_u = dst_y + dst_y_size;
    uint8_t* dst_v = dst_u + dst_u_size;
    // Per-quadrant destination pointers inside each plane.
    uint8_t* dst_y_1 = dst_y;
    uint8_t* dst_y_2 = dst_y + half_w;
    uint8_t* dst_y_3 = dst_y + width * half_h;
    uint8_t* dst_y_4 = dst_y + width * half_h + half_w;
    uint8_t* dst_u_1 = dst_u;
    uint8_t* dst_u_2 = dst_u + (half_w >> 1);
    uint8_t* dst_u_3 = dst_u + (width * half_h >> 1);
    uint8_t* dst_u_4 = dst_u + (width * half_h >> 1) + (half_w >> 1);
    uint8_t* dst_v_1 = dst_v;
    uint8_t* dst_v_2 = dst_v + (half_w >> 1);
    uint8_t* dst_v_3 = dst_v + (width * half_h >> 1);
    uint8_t* dst_v_4 = dst_v + (width * half_h >> 1) + (half_w >> 1);
    // Quadrant sizes: odd remainders go to quadrants 2/3/4.
    std::vector<WH> wh_vec = { { half_w, half_h },{half_w + mode_w,half_h},{half_w,half_h + mode_h},{half_w + mode_w,half_h + mode_h} };
    if (omp)
    {
        uint8_t* dst_data[4][3] = { {dst_y_1,dst_u_1,dst_v_1},{dst_y_2,dst_u_2,dst_v_2},{dst_y_3,dst_u_3,dst_v_3},{dst_y_4,dst_u_4,dst_v_4} };
        uint8_t* src_data[4] = { src1,src2,src3,src4 };
        int thread_num = 4;
        omp_set_num_threads(thread_num);
#pragma omp parallel
        {
#pragma omp for
            for (int i = 0; i < thread_num; i++)
            {
                uint8_t* dst_y = dst_data[i][0];
                uint8_t* dst_u = dst_data[i][1];
                uint8_t* dst_v = dst_data[i][2];
                uint8_t* src = src_data[i];
                libyuv::UYVYToI422(src, width << 1, dst_y, width, dst_u, width >> 1, dst_v, width >> 1, wh_vec[i].w, wh_vec[i].h);
            }
        }
    }
    else // previously missing: without it the serial copies re-ran after the parallel ones
    {
        libyuv::UYVYToI422(src1, width << 1, dst_y_1, width, dst_u_1, width >> 1, dst_v_1, width >> 1, wh_vec[0].w, wh_vec[0].h);
        libyuv::UYVYToI422(src2, width << 1, dst_y_2, width, dst_u_2, width >> 1, dst_v_2, width >> 1, wh_vec[1].w, wh_vec[1].h);
        libyuv::UYVYToI422(src3, width << 1, dst_y_3, width, dst_u_3, width >> 1, dst_v_3, width >> 1, wh_vec[2].w, wh_vec[2].h);
        libyuv::UYVYToI422(src4, width << 1, dst_y_4, width, dst_u_4, width >> 1, dst_v_4, width >> 1, wh_vec[3].w, wh_vec[3].h);
    }
}
static void UYVYCopy(uint8_t* src, const size_t& offset_x, const size_t& offset_y, const size_t& src_w, const size_t& src_h,
uint8_t* dst, const size_t& dst_w, const size_t& dst_h, const int32_t& omp = 0)
{
size_t half_dst_w = (dst_w >> 1);
size_t half_dst_h = (dst_h >> 1);
size_t mode_w = dst_w % 2;
size_t mode_h = dst_h % 2;
size_t src1_offset = (offset_y * src_w << 1) + (offset_x << 1);
uint8_t* src1 = src + src1_offset;
size_t src2_offset = (offset_y * src_w << 1) + ((offset_x + half_dst_w) << 1);
uint8_t* src2 = src + src2_offset;
size_t src3_offset = ((offset_y + half_dst_h) * src_w << 1) + (offset_x << 1);
uint8_t* src3 = src + src3_offset;
size_t src4_offset = ((offset_y + half_dst_h) * src_w << 1) + ((offset_x + half_dst_w) << 1);
uint8_t* src4 = src + src4_offset;
uint8_t* dst1 = dst;
size_t dst_offset = (half_dst_w << 1);
uint8_t* dst2 = dst + dst_offset;
dst_offset = (dst_w * half_dst_h << 1);
uint8_t* dst3 = dst + dst_offset;
dst_offset = ((dst_w * half_dst_h << 1) + (half_dst_w << 1));
uint8_t* dst4 = dst + dst_offset;
std::vector<WH> dst_wh_vec = { { half_dst_w, half_dst_h },{half_dst_w + mode_w,half_dst_h},{half_dst_w,half_dst_h + mode_h},{half_dst_w + mode_w,half_dst_h + mode_h} };
if (omp)
{
std::vector<uint8_t*> src_vec = { src1,src2,src3,src4 };
std::vector<uint8_t*> dst_vec = { dst1,dst2,dst3,dst4 };
auto size = src_vec.size();
omp_set_num_threads(size);
#pragma omp parallel
{
#pragma omp for
for (int i = 0; i < size; i++)
{
uint8_t* src = src_vec[i];
uint8_t* dst = dst_vec[i];
libyuv::CopyPlane(src, src_w << 1, dst, dst_w << 1, dst_wh_vec[i].w * 2, dst_wh_vec[i].h);
}
}
}
else
{
libyuv::CopyPlane(src1, src_w << 1, dst1, dst_w << 1, dst_wh_vec[0].w * 2, dst_wh_vec[0].h);
libyuv::CopyPlane(src2, src_w << 1, dst2, dst_w << 1, dst_wh_vec[1].w * 2, dst_wh_vec[1].h);
libyuv::CopyPlane(src3, src_w << 1, dst3, dst_w << 1, dst_wh_vec[2].w * 2, dst_wh_vec[2].h);
libyuv::CopyPlane(src4, src_w << 1, dst4, dst_w << 1, dst_wh_vec[3].w * 2, dst_wh_vec[3].h);
}
}
static void I422Copy4k(uint8_t* src, const size_t& offset_x, const size_t& offset_y, const size_t& src_w, const size_t& src_h,
uint8_t* dst, const size_t& dst_w, const size_t& dst_h, const int32_t& omp)
{
......
......@@ -143,7 +143,7 @@ HRESULT DeckLinkInputDevice::VideoInputFrameArrived(IDeckLinkVideoInputFrame* vi
deltaTime = currTime - m_lastRecvTS;
if (deltaTime >= 1000)
{
qDebug() << GetCurrDateTimeStr() << " decklink input fps " << m_fps << "\n";
if(m_fps < Settings::FrameRate - 1) qDebug() << GetCurrDateTimeStr() << " decklink input fps " << m_fps << "\n";
m_fps = 0;
m_lastRecvTS = currTime;
......
......@@ -6,6 +6,7 @@
#include "TimePlus.h"
#include "BlackMagicDesign/DeckLinkInputPage.h"
#include "Utils/Platform.h"
#include "Utils/Settings.h"
namespace
{
......@@ -89,7 +90,11 @@ DeckLinkInputPage::DeckLinkInputPage() : SelectedDevice(nullptr), NDIOutput(null
Capture = std::make_shared<CaptureThread>();
Replay = std::make_shared<ReplayThread>();
udp_thread = new QThread();
udpServer = std::make_shared<UdpServer>();
udpServer->moveToThread(udp_thread);
udp_thread->start();
RecordStorePtr = std::make_shared<RecordStore>();
Capture->SetRecordStore(RecordStorePtr);
......@@ -263,6 +268,7 @@ void DeckLinkInputPage::AudioChannelChanged(int selectIndex)
void DeckLinkInputPage::RecvMsg(QByteArray json)
{
//qDebug() << "RecvMsg:" << GetCurrDateTimeStr() << "\n";
QJsonDocument document = QJsonDocument::fromJson(json);
if (document.isObject())
{
......@@ -273,13 +279,16 @@ void DeckLinkInputPage::RecvMsg(QByteArray json)
{
listener_->OnRecvMsg(json);
}
if(type == "REPLAY")
{
if (type == "REPLAY")
{
//auto obj = object["data"].toObject();
auto status = (ReplayStatus)object["status"].toInt();
auto start_time = object["start_time"].toInt();
auto end_time = object["end_time"].toInt();
ReplayParams params{ status, start_time ,end_time };
ReplayParams params;
params.status = status;
params.start_time = start_time;
params.end_time = end_time;
//if (end_time <= start_time || !Replay || !Replay->CanReplay(params)) return;
//qint64 timecode = QString::number(obj.value("timecode").toDouble(), 'f', 0).toLongLong();
switch (status)
......@@ -301,8 +310,29 @@ void DeckLinkInputPage::RecvMsg(QByteArray json)
}
}
//qDebug() << "RecvMsg-->replay" << GetCurrDateTimeStr() << "\n";
}
else if (type == "ClipParam")
{
auto tmp = object["dynamicin"].toInt();
if (tmp <= Settings::MaxDynamicIn && tmp >= 0)
{
if (Settings::ReplayForward != tmp) Settings::ReplayForward = tmp;
if (Settings::SecondSdiOutWaitNumsFront != tmp) Settings::SecondSdiOutWaitNumsFront = tmp;
}
tmp = object["dynamicout"].toInt();
if (tmp <= Settings::MaxDynamicOut && tmp >= 0)
{
if (Settings::DynamicOut != tmp) Settings::DynamicOut = tmp;
}
tmp = object["staticframe"].toInt();
if (tmp >= 0 && tmp <= Settings::MaxStaticFrame) {
if (Settings::SDIOneFrameDuration != tmp) Settings::SDIOneFrameDuration = tmp;
if (Settings::NdiOneFrameDuration != tmp) Settings::NdiOneFrameDuration = tmp;
}
}
}
}
......
......@@ -245,7 +245,7 @@ bool DeckLinkOutputDevice::StartPlayback(BMDDisplayMode displayMode, bool enable
/*scheduleVideoFramesThread = std::thread(&DeckLinkOutputDevice::scheduleVideoFramesFunc, this);
scheduleAudioFramesThread = std::thread(&DeckLinkOutputDevice::scheduleAudioFramesFuncDeley, this);*/
if (Settings::SecondSdiOutWaitNums)
if (Settings::SecondFlag)
{
scheduleVideoFramesThread = std::thread(&DeckLinkOutputDevice::scheduleVideoFramesWaitFunc, this);
}
......@@ -500,34 +500,49 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
qint64 zoom_key = 0;
std::shared_ptr<SampleQueue<std::shared_ptr<VideoFrameWithMask>>> zoom_queue = nullptr;
std::map<qint64, std::shared_ptr<VideoFrameWithMask>>* inner_masked_map = NULL;
std::shared_ptr<VideoFrameWithMask> pic_frame_ptr{ nullptr };
while (true)
{
if (state == PlaybackState::Stopping) break;
std::shared_ptr<VideoFrameWithMask> outputFrame = nullptr;
std::vector<std::shared_ptr<VideoFrameWithMask>> frames;
if (PicFrameQueue.Size())
{
PicFrameQueue.Pop(pic_frame_ptr);
if (pic_frame_ptr && !Settings::UsePicFlag) Settings::UsePicFlag = true;
}
if (outputMaskVideoFrameQueue.WaitFor(Settings::SecondSdiOutWaitNums))
{
outputMaskVideoFrameQueue.Pop(outputFrame);
outputMaskVideoFrameDeque.Pop();
outputMaskVideoFrameDeque.Pop(Settings::SecondSdiOutWaitNumsFront);
if (pic_frame_ptr) outputFrame = pic_frame_ptr;
if (!outputFrame || (!outputFrame->data_ && !outputFrame->pImage && !outputFrame->pImage->data)) continue;
{
if (inner_masked_map)
{
auto key = inner_masked_map->begin()->first;
outputFrame = inner_masked_map->begin()->second;
auto out_key = outputFrame->start_tm_;
inner_masked_map->erase(key);
if (!inner_masked_map->size())
if (!send_one_frame_flag)
{
zoom_key = out_key;
delete inner_masked_map;
masked_map.erase(out_key);
inner_masked_map = NULL;
masked_status_map.erase(out_key);
auto key = inner_masked_map->begin()->first;
outputFrame = inner_masked_map->begin()->second;
auto out_key = outputFrame->start_tm_;
inner_masked_map->erase(key);
if (!inner_masked_map->size())
{
//qDebug() << GetCurrDateTimeStr() << "tm:" << out_key << " will delete...\n";
std::lock_guard<std::mutex> locker(sdi_clear_mutex);
zoom_key = out_key;
delete inner_masked_map;
masked_map.erase(out_key);
inner_masked_map = NULL;
masked_status_map.erase(out_key);
qDebug() << "end send masked data,time:" << GetCurrDateTimeStr() << "\n";
}
}
}
else
{
......@@ -556,6 +571,7 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
end_blend_frame_ = nullptr;
}
continue;
//goto sendSdi;
}
auto itor = masked_status_map.begin();
for (auto pair : masked_map)
......@@ -594,6 +610,8 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
if (itor->second.status == SSS_CLEAR)
{
std::lock_guard<std::mutex> locker(sdi_clear_mutex);
if (recv_end_back_flag) recv_end_back_flag = false;
if (recv_back_num) recv_back_num = 0;
masked_map.erase(masked_itor->first);
inner_masked_map->clear();
delete inner_masked_map;
......@@ -605,18 +623,20 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
{
if (inner_masked_map)
{
auto key = inner_masked_map->begin()->first;
outputFrame = inner_masked_map->begin()->second;
auto out_key = outputFrame->start_tm_;
inner_masked_map->erase(key);
if (!inner_masked_map->size())
{
std::lock_guard<std::mutex> locker(sdi_clear_mutex);
delete inner_masked_map;
masked_map.erase(out_key);
masked_map.erase(itor->first);
inner_masked_map = NULL;
masked_status_map.erase(itor->first);
continue;
}
qDebug() << "start send masked data,time:" << GetCurrDateTimeStr() << "\n";
auto key = inner_masked_map->begin()->first;
outputFrame = inner_masked_map->begin()->second;
auto out_key = outputFrame->start_tm_;
inner_masked_map->erase(key);
}
}
......@@ -627,7 +647,7 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
}
}
if (!outputFrame || (!outputFrame->data_ && !outputFrame->pImage && !outputFrame->pImage->data)) continue;
if (Settings::SDIOneFrameDuration > 0 && outputFrame->flag_ == BS_END)
......@@ -636,10 +656,15 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
}
if (end_blend_frame_ && Settings::SDIOneFrameDuration > 0 && send_count < Settings::SDIOneFrameDuration)
{
send_one_frame_flag = true;
if(!send_one_frame_flag) send_one_frame_flag = true;
send_count++;
}
else if (send_count >= Settings::SDIOneFrameDuration && end_blend_frame_ /*&& (outputFrame->flag_ == BS_IDEL || outputFrame->flag_ == BS_START)*/)
if (send_one_frame_flag)
{
outputFrame = end_blend_frame_;
}
if (send_count >= Settings::SDIOneFrameDuration && end_blend_frame_ /*&& (outputFrame->flag_ == BS_IDEL || outputFrame->flag_ == BS_START)*/)
{
if (Settings::ZoomFlag && !send_zoom_frame_flag /*&& zoomVideoFrameQueue.Size()*/ && zoom_map.Find(zoom_key,zoom_queue))
{
......@@ -654,11 +679,8 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
}
}
if (send_one_frame_flag)
{
outputFrame = end_blend_frame_;
}
else if (send_zoom_frame_flag)
if (send_zoom_frame_flag)
{
if (zoom_queue)
{
......@@ -699,7 +721,7 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
}*/
}
sendSdi:
if (!outputFrame || (!outputFrame->data_ && (!outputFrame->pImage || !outputFrame->pImage->data))) continue;
uint32_t w = outputFrame->width_;
uint32_t h = outputFrame->height_;
......@@ -824,7 +846,7 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
if (deltaTime >= 1000)
{
qDebug() << GetCurrDateTimeStr() << " decklink output fps " << m_fps << ", qsize " << qsize << "\n";
if (m_fps < Settings::FrameRate - 1) qDebug() << GetCurrDateTimeStr() << " decklink output fps " << m_fps << ", qsize " << qsize << "\n";
m_fps = 0;
m_lastRecvTS = currTime;
......@@ -1008,7 +1030,7 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
if (deltaTime >= 1000)
{
qDebug() << GetCurrDateTimeStr() << " decklink output fps " << m_fps << ", qsize " << qsize << "\n";
if(m_fps < Settings::FrameRate - 1) qDebug() << GetCurrDateTimeStr() << " decklink output fps " << m_fps << ", qsize " << qsize << "\n";
m_fps = 0;
m_lastRecvTS = currTime;
......@@ -1279,7 +1301,7 @@ bool DeckLinkOutputDevice::getReferenceSignalMode(BMDDisplayMode* mode)
if (deltaTime >= 1000)
{
qDebug() << GetCurrDateTimeStr() << " decklink output fps " << m_fps << ", qsize " << qsize << "\n";
if (m_fps < Settings::FrameRate - 1) qDebug() << GetCurrDateTimeStr() << " decklink output fps " << m_fps << ", qsize " << qsize << "\n";
m_fps = 0;
m_lastRecvTS = currTime;
......@@ -1544,7 +1566,7 @@ void DeckLinkOutputDevice::AddZoomFrame(std::shared_ptr<VideoFrameWithMask> fram
{
if (frame && (frame->data_ || (frame->pImage && (frame->pImage->data || frame->pImage->uyvy_data))))
{
if(Settings::SecondSdiOutWaitNums)
if(Settings::SecondFlag)
{
auto start_time = frame->start_tm_;
//std::lock_guard<std::mutex> locker(sdi_clear_mutex);
......@@ -1569,65 +1591,162 @@ void DeckLinkOutputDevice::AddZoomFrame(std::shared_ptr<VideoFrameWithMask> fram
}
}
// Drop any frames still buffered in the front (pre-roll) frame queue.
void DeckLinkOutputDevice::ClearFrontQueue()
{
    if (FrontMaskVideoFrameQueue.Size() == 0) return;
    FrontMaskVideoFrameQueue.Reset();
}
// Queue a static picture frame for output. The playback loop pops
// PicFrameQueue and, once a frame arrives, switches Settings::UsePicFlag
// on and substitutes the picture for the live output frame.
void DeckLinkOutputDevice::AddPicFrame(std::shared_ptr<VideoFrameWithMask> frame)
{
PicFrameQueue.Push(frame);
}
// Buffer a frame in the front (replay lead-in) queue. If the backlog has
// grown past twice the configured replay lead-in frame count, the stale
// contents are flushed before the new frame is appended, so the queue
// never holds more than roughly one lead-in window of old frames.
void DeckLinkOutputDevice::AddFrontVideoFrame(std::shared_ptr<VideoFrameWithMask> frame)
{
    const auto backlog_limit = Settings::ReplayForward * 2;
    if (FrontMaskVideoFrameQueue.Size() > backlog_limit)
    {
        FrontMaskVideoFrameQueue.Reset();
    }
    FrontMaskVideoFrameQueue.Push(frame);
}
void DeckLinkOutputDevice::AddVideoFrameMask(std::shared_ptr<VideoFrameWithMask> frame)
{
//auto t1 = TimeMilliSecond();
if (outputMaskVideoFrameQueue.Size() > (50 * 3 + Settings::SecondSdiOutWaitNums) || output_video_frame_map.Size() > (50 * 3 + Settings::SecondSdiOutWaitNums))
{
if(outputMaskVideoFrameQueue.Size()) outputMaskVideoFrameQueue.Reset();
if (output_video_frame_map.Size()) output_video_frame_map.Clear();
outputMaskVideoFrameQueue.Reset();
output_video_frame_map.Clear();
}
#if USETHREADS
if (frame && (frame->data_ || (frame->pImage && (frame->pImage->uyvy_data || frame->pImage->data))))
{
if (send_zoom_frame_flag)
{
//qDebug() << "AddVideoFrameMask:" << "timestamp:" << frame->sequenceNum_ << GetCurrDateTimeStr() << "\n";
}
//if (send_zoom_frame_flag)
//{
// //qDebug() << "AddVideoFrameMask:" << "timestamp:" << frame->sequenceNum_ << GetCurrDateTimeStr() << "\n";
//}
//qDebug() << "AddVideoFrameMask:" << "timestamp:" << frame->sequenceNum_ << GetCurrDateTimeStr() << "\n";
if (frame->fmt_ == bmdFormat8BitYUV)
{
if (Settings::SecondSdiOutWaitNums)
if (Settings::SecondFlag)
{
if (frame->flag_ == BS_IDEL)
if (frame->flag_ == BS_IDEL || frame->flag_ == BS_RP_START_FRONT || frame->flag_ == BS_RP_END_BACK)
{
outputMaskVideoFrameQueue.Push(frame);
outputMaskVideoFrameDeque.PushBack(frame);
//if (recv_end_back_flag)
//{
// std::lock_guard<std::mutex> locker(sdi_clear_mutex);
// auto itor = masked_map.find(last_start_tm);
// if (itor != masked_map.end())
// {
// recv_back_num++;
// if (recv_back_num >= Settings::SecondSdiOutWaitNumsBack)
// {
// recv_end_back_flag = false;
// recv_back_num = 0;
// //qDebug() << GetCurrDateTimeStr() <<" second sdi add frame end and start_tm:" << last_start_tm << ",map_size:"<< itor->second->size() << "\n";
// }
// frame->start_tm_ = last_start_tm;
// itor->second->insert({ frame->timestamp_, frame });
// }
// else
// {
// recv_end_back_flag = false;
// recv_back_num = 0;
// }
//
//}
}
else
{
auto itor = masked_map.find(frame->start_tm_);
if (frame->flag_ == BS_START)
{
auto itor = masked_map.find(frame->start_tm_);
if (itor != masked_map.end()) return;
else
{
//qDebug() << GetCurrDateTimeStr() <<" second sdi start frame tm:" << frame->start_tm_ << "\n";
/*if (recv_end_back_flag) recv_end_back_flag = false;
if (recv_back_num) recv_back_num = 0;*/
last_start_tm = frame->start_tm_;
auto tmp_map = new std::map<qint64, std::shared_ptr<VideoFrameWithMask>>();
int size = outputMaskVideoFrameDeque.Size();
int nums = (size > Settings::SecondSdiOutWaitNums ? Settings::SecondSdiOutWaitNums : size);
std::vector<std::shared_ptr<VideoFrameWithMask>> tmp_vec;
outputMaskVideoFrameDeque.Put(nums, tmp_vec);
for (auto data : tmp_vec)
if (!frame->replay_flag /*|| !Settings::ReplayForward*/ && Settings::SecondSdiOutWaitNumsFront)
{
if (data)
int size = outputMaskVideoFrameDeque.Size();
int nums = (size > Settings::SecondSdiOutWaitNumsFront ? Settings::SecondSdiOutWaitNumsFront : size);
std::vector<std::shared_ptr<VideoFrameWithMask>> tmp_vec;
auto ret = outputMaskVideoFrameDeque.Put(nums, tmp_vec);
if (ret)
{
for (auto data : tmp_vec)
{
if (data)
{
if (0 == data->start_tm_)
{
data->start_tm_ = frame->timestamp_;
tmp_map->insert({ data->timestamp_,data });
}
}
}
}
}
else
{
auto size = FrontMaskVideoFrameQueue.Size();
qint32 i = 0;
while (!FrontMaskVideoFrameQueue.Empty())
{
data->start_tm_ = frame->timestamp_;
tmp_map->insert({ data->timestamp_,data });
std::shared_ptr<VideoFrameWithMask> tmp_frame = nullptr;
if (FrontMaskVideoFrameQueue.Pop(tmp_frame))
{
if (tmp_frame)
{
tmp_frame->start_tm_ = last_start_tm;
/*tmp_frame->timestamp_ = frame->timestamp_ - size + i;
i++;*/
tmp_map->insert({ tmp_frame->timestamp_,tmp_frame });
}
}
}
}
tmp_map->insert({ frame->timestamp_, frame });
masked_map.insert({ frame->timestamp_,tmp_map });
tmp_map->insert({ frame->timestamp_, frame});
}
}
else
else if(frame->flag_ == BS_ING || frame->flag_ == BS_END)
{
std::lock_guard<std::mutex> locker(sdi_clear_mutex);
auto itor = masked_map.find(frame->start_tm_);
if (itor == masked_map.end()) return;
itor->second->insert({ frame->timestamp_, frame });
//if (frame->flag_ == BS_END && Settings::SecondSdiOutWaitNumsBack) recv_end_back_flag = true;
if (frame->replay_flag)
{
while (!FrontMaskVideoFrameQueue.Empty())
{
std::shared_ptr<VideoFrameWithMask> tmp_frame = nullptr;
if (FrontMaskVideoFrameQueue.Pop(tmp_frame))
{
if (tmp_frame)
{
tmp_frame->start_tm_ = frame->start_tm_;
/*tmp_frame->timestamp_ = frame->timestamp_ - size + i;
i++;*/
itor->second->insert({ tmp_frame->timestamp_,tmp_frame });
}
}
}
}
}
}
......@@ -1640,7 +1759,7 @@ void DeckLinkOutputDevice::AddVideoFrameMask(std::shared_ptr<VideoFrameWithMask>
}
else if (frame->fmt_ == bmdFormat8BitBGRA)
{
if (Settings::SecondSdiOutWaitNums)
if (Settings::SecondFlag)
{
if (frame->flag_ == BS_START)
{
......@@ -1649,7 +1768,7 @@ void DeckLinkOutputDevice::AddVideoFrameMask(std::shared_ptr<VideoFrameWithMask>
{
auto tmp_map = new std::map<qint64, std::shared_ptr<VideoFrameWithMask>>();
int size = outputMaskVideoFrameDeque.Size();
int nums = (size > Settings::SecondSdiOutWaitNums ? Settings::SecondSdiOutWaitNums : size);
int nums = (size > Settings::SecondSdiOutWaitNumsFront ? Settings::SecondSdiOutWaitNumsFront : size);
std::vector<std::shared_ptr<VideoFrameWithMask>> tmp_vec;
outputMaskVideoFrameDeque.Put(nums, tmp_vec);
for (auto data : tmp_vec)
......@@ -1859,7 +1978,7 @@ void DeckLinkOutputDevice::BGRAToUYVY(const std::shared_ptr<VideoFrameWithMask>&
std::make_shared<VideoFrameWithMask>(src_w,src_h,tm,image->sequenceNum_,uyvy,bmdFormat8BitYUV,image->flag_,image->meta_, start_time);
//sort_map_.insert({ image->timestamp_ ,video_frame });
if(Settings::SecondSdiOutWaitNums)
if(Settings::SecondFlag)
{
std::lock_guard<std::mutex> locker(sdi_clear_mutex);
auto itor = masked_map.find(start_time);
......
......@@ -107,11 +107,6 @@ DeckLinkOutputPage::DeckLinkOutputPage() : SelectedDevice(nullptr), Process(null
if (!Zoom)
{
Zoom = std::make_shared<ZoomThread>();
Zoom->start();
if (Zoom)
{
connect(this, &DeckLinkOutputPage::PushAttribute, Zoom.get(), &ZoomThread::addSportAttr, Qt::DirectConnection);
}
}
}
......@@ -304,14 +299,23 @@ void DeckLinkOutputPage::RequestedDeviceGranted(ComPtr<IDeckLink>& device)
connect(BindingInputPage->GetCapture(), &CaptureThread::PushFrame, ProcessMask.get(), &ProcessMaskThread::AddFrame, Qt::DirectConnection);
connect(BindingInputPage->GetReplay(), &ReplayThread::PushFrame, ProcessMask.get(), &ProcessMaskThread::AddFrame, Qt::DirectConnection);
connect(BindingInputPage->GetReplay(), &ReplayThread::PushFrameForSdi, SelectedDevice.Get(), &DeckLinkOutputDevice::AddFrontVideoFrame, Qt::DirectConnection);
connect(ProcessMask.get(), &ProcessMaskThread::PushFrame, NDIOutput.get(), &NDIOutputThread::AddVideoFrameWithMask, Qt::DirectConnection);
//if (MqThread) connect(MqThread.get(), &ConsumerMqThread::PushMask, ProcessMask.get(), &ProcessMaskThread::addMaskBuffer);
connect(this, &DeckLinkOutputPage::PushMask, ProcessMask.get(), &ProcessMaskThread::addMaskBuffer, Qt::DirectConnection);
if (DecodeThread)
{
connect(DecodeThread.get(), &DecodeMaskThread::PushMask, ProcessMask.get(), &ProcessMaskThread::addMaskBuffer, Qt::DirectConnection);
}
//connect(this, &DeckLinkOutputPage::PushMask, ProcessMask.get(), &ProcessMaskThread::addMaskBuffer, Qt::DirectConnection);
connect(ProcessMask.get(), &ProcessMaskThread::PushFrame, SelectedDevice.Get(), &DeckLinkOutputDevice::AddVideoFrameMask, Qt::DirectConnection);
connect(ProcessMask.get(), &ProcessMaskThread::PushFrameToReplay, BindingInputPage->GetReplay(), &ReplayThread::addCropFrame, Qt::DirectConnection);
if (Zoom)
{
connect(Zoom.get(), &ZoomThread::PushFrame, SelectedDevice.Get(), &DeckLinkOutputDevice::AddZoomFrame, Qt::DirectConnection);
connect(SelectedDevice.Get(), &DeckLinkOutputDevice::SendZoomResult, Zoom.get(), &ZoomThread::recvResult, Qt::DirectConnection);
connect(this, &DeckLinkOutputPage::PushAttribute, Zoom.get(), &ZoomThread::addSportAttr, Qt::DirectConnection);
Zoom->start();
}
#elif USE_H2V
connect(BindingInputPage->GetCapture(), &CaptureThread::PushFrame, Process.get(), &ProcessThread::AddFrame);
......@@ -370,11 +374,12 @@ void DeckLinkOutputPage::OnRecvMqMsg(const QJsonDocument& document)
if (obj.isEmpty()) return;
if (type == "SENDMASK")
{
auto buffer = std::make_shared<MaskBuffer>(obj);
/*auto buffer = std::make_shared<MaskBuffer>(obj);
if (buffer && buffer->signal != -1)
{
emit PushMask(buffer);
}
}*/
if (DecodeThread) DecodeThread->addData(obj);
}
else if (type == "SENDPOINTS")
{
......@@ -406,6 +411,11 @@ void DeckLinkOutputPage::OnRecvMsg(QByteArray data)
{
ProcessMask->ClearQueue();
}
if (SelectedDevice && (status == RS_START || status == RS_RE_START))
{
SelectedDevice->ClearFrontQueue();
}
}
else if (type == "LOADMASK")
{
......@@ -431,6 +441,45 @@ void DeckLinkOutputPage::OnRecvMsg(QByteArray data)
Zoom->setResult(info);
}
}
else if (type == "CUT")
{
auto points = object["point"].toArray();
auto x = points[0].toInt();
auto y = points[1].toInt();
auto crop_w = object["width"].toInt();
auto crop_h = object["height"].toInt();
CropMessage msg(x,y,crop_w,crop_h);
if (crop_w && crop_h && x >=0 && y>=0)
{
if (ProcessMask && msg.check())
{
if (!msg.checkCrop())
{
if (Settings::CropFlag) Settings::CropFlag = false;
Settings::CropMsg = msg;
}
else
{
auto crop = ProcessMask->GetCropThread();
if (crop)
{
crop->addCropMsg(msg);
}
auto crop_1 = BindingInputPage->GetReplay()->GetCropThread();
if (crop_1)
{
crop_1->addCropMsg(msg);
}
if (!Settings::CropFlag) Settings::CropFlag = true;
Settings::CropMsg = msg;
}
}
}
}
}
}
......@@ -552,6 +601,9 @@ void DeckLinkOutputPage::ObjectNameChanged(const QString& newName)
std::string exchange_name = queue_name;
MqThread = std::make_shared<ConsumerMqThread>(queue_name, exchange_name, "127.0.0.1", "admin", "123456",this);
MqThread->start();
DecodeThread = std::make_unique<DecodeMaskThread>();
DecodeThread->start();
}
#elif USE_H2V
QString ndi_name = QString("h2v-4k-output-") + newName.at(newName.size() - 1);
......@@ -650,4 +702,12 @@ void DeckLinkOutputPage::InitControlValue(const qint32& index)
}
if (PreviewView) PreviewView->GetOverlay()->SetDeleyTime(DeleyTime);
}
void DeckLinkOutputPage::ForwardPicData(std::shared_ptr<VideoFrameWithMask> frame)
{
if (SelectedDevice)
{
SelectedDevice->AddPicFrame(frame);
}
}
\ No newline at end of file
#include "BlackMagicDesign/OpenFile.h"
#include <QFileDialog>
#include "Utils/yuv4k.h"
#include "Utils/Common.h"
#include "Utils/Settings.h"
// COpenFile drives the "open file" flow: it shows a file dialog, loads a
// 4K still image and forwards it (converted to UYVY) via Qt signals.
// Construction only forwards the parent to QObject; no other state is set up here.
COpenFile::COpenFile(QObject* parent) : QObject(parent)
{
}
// No owned resources to release; default teardown.
COpenFile::~COpenFile()
{}
// Show a file dialog for a still image (*.png / *.jpg), load it with
// OpenCV, convert it to packed UYVY and broadcast it.
//
// Emits tellFilePath(path) once a valid image of exactly K4WIDTH x
// K4HEIGHT is loaded, then SendPicData(frame) with the converted frame.
// Images of any other size, unreadable files, and a cancelled dialog are
// ignored silently (a qDebug line is logged on read failure).
//
// Fixes: the dialog caption literal was mojibake ("ѡļ") left over from a
// source-encoding corruption — restored to the intended "选择文件"
// ("select file"); removed the unused local `size`.
void COpenFile::doOpenFile()
{
    QString s = QFileDialog::getOpenFileName(NULL, QStringLiteral("选择文件"), "/", "Files(*.png *.jpg)");
    if (s.isEmpty()) return;  // user cancelled the dialog
    path = s;
    std::string str_path = path.toLocal8Bit();
    auto bk_mat = cv::imread(str_path, cv::IMREAD_COLOR);
    if (bk_mat.data == nullptr)
    {
        qDebug() << "read pic fail........." << "\n";
        return;
    }
    // Downstream consumers only handle full-4K stills.
    if (bk_mat.cols != K4WIDTH || bk_mat.rows != K4HEIGHT) return;
    emit tellFilePath(path);
    // UYVY packs 2 bytes per pixel. NOTE(review): `data` is handed to
    // VideoFrameWithMask, which presumably takes ownership and frees it —
    // TODO confirm, otherwise this leaks one frame per load.
    uint8_t* data = new uint8_t[K4WIDTH * K4HEIGHT << 1];
    cv::cvtColor(bk_mat, bk_mat, cv::COLOR_BGR2BGRA);
    Yuv4k::ARGBToUYVY4K(bk_mat.data, K4WIDTH * K4HEIGHT, data, K4WIDTH * K4HEIGHT >> 1, K4WIDTH >> 1, K4HEIGHT >> 1);
    std::shared_ptr<VideoFrameWithMask> frame = std::make_shared<VideoFrameWithMask>(K4WIDTH, K4HEIGHT, data, bmdFormat8BitYUV, BS_IDEL, kDefaultHLGBT2020HDRMetadata);
    emit SendPicData(frame);
}
\ No newline at end of file
......@@ -13,7 +13,7 @@
//extern qint64 StartTimeStamp;
//extern int OneFrameDuration;
#define SENDBGRA 1
#define MAXSIZE 30
#define MAXSIZE 50
#define OUT_UYVY 1
NDIOutputThread::NDIOutputThread(const QString& Name, int w, int h, qint32 deleyTime) : NDISenderName(Name), deleyTime_(deleyTime), width(w), height(h), isSending(false), Instance(nullptr), cropFlag(false),
......@@ -207,32 +207,12 @@ void NDIOutputThread::run()
if (!Init())
return;
if (cropFlag) audio_thread = std::thread(&NDIOutputThread::SendAudioFunc, this);
//if (cropFlag) audio_thread = std::thread(&NDIOutputThread::SendAudioFunc, this);
std::atomic_bool send_one_frame_flag = false;
while (true)
{
if (cropFlag)
{
std::shared_ptr<Image> frame = nullptr;
if (taskQueue.WaitFor(frame))
{
if (frame && frame->IsValid())
{
auto now_time = QDateTime::currentMSecsSinceEpoch();
auto dever_time = now_time - frame->getInputFrameCurTimeStamp();
if (dever_time < deleyTime_)
{
current_v_sleep_ms = deleyTime_ - dever_time;
std::this_thread::sleep_for(std::chrono::milliseconds(current_v_sleep_ms));
}
srcBuff = frame->GetBytes();
Frame.p_data = (uint8_t*)srcBuff;
NDIlib_send_send_video_v2(Instance, &Frame);
}
}
}
else if (timePlusFlag)
if (timePlusFlag)
{
if (VideoMaskQueue.WaitFor(Settings::RTSDIOutputWaitNums))
{
......@@ -240,16 +220,16 @@ void NDIOutputThread::run()
VideoMaskQueue.Pop(frame_mask);
if (!frame_mask || (!frame_mask->data_ && !frame_mask->pImage && !frame_mask->pImage->data)) continue;
if (Settings::OneFrameDuration > 0 && frame_mask->flag_ == BS_END)
if (Settings::NdiOneFrameDuration > 0 && frame_mask->flag_ == BS_END)
{
end_blend_frame_ = frame_mask;
}
if (Settings::OneFrameDuration > 0 && send_count < Settings::OneFrameDuration && end_blend_frame_)
if (Settings::NdiOneFrameDuration > 0 && send_count < Settings::NdiOneFrameDuration && end_blend_frame_)
{
send_one_frame_flag = true;
send_count++;
}
else if (send_count >= Settings::OneFrameDuration && end_blend_frame_ && (frame_mask->flag_ == BS_IDEL || frame_mask->flag_ == BS_START))
else if (send_count >= Settings::NdiOneFrameDuration && end_blend_frame_ && (frame_mask->flag_ == BS_IDEL || frame_mask->flag_ == BS_START || frame_mask->flag_ == BS_RP_END_BACK))
{
send_one_frame_flag = false;
send_count = 0;
......@@ -261,6 +241,14 @@ void NDIOutputThread::run()
}
else
{
/*if ((frame_mask->flag_ == BS_IDEL || frame_mask->flag_ == BS_RP_END_BACK) && VideoMaskBackQueue.Size() )
{
VideoMaskBackQueue.Pop(frame_mask);
}*/
/*else
{
if (VideoMaskBackQueue.Size()) VideoMaskBackQueue.Reset();
}*/
if (frame_mask->data_) Frame.p_data = frame_mask->data_;
else Frame.p_data = frame_mask->pImage->uyvy_data;
}
......@@ -270,17 +258,6 @@ void NDIOutputThread::run()
}
}
else if (filterFlag)
{
if (VideoFilterQueue.WaitFor(frame))
{
if (!frame || !frame->data) continue;
Frame.p_data = frame->data;
Frame.timestamp = frame->timestamp;
NDIlib_send_send_video_v2(Instance, &Frame);
}
}
else{
if (VideoQueue.WaitForLast(frame))
{
......@@ -340,7 +317,7 @@ void NDIOutputThread::run()
}
}
/*m_fps++;
currTime = TimeMilliSecond();
deltaTime = currTime - m_lastTS;
......@@ -352,7 +329,6 @@ void NDIOutputThread::run()
m_lastTS = currTime;
}*/
}
}
#if 0
......@@ -459,8 +435,38 @@ void NDIOutputThread::AddVideoFrameWithMask(std::shared_ptr<VideoFrameWithMask>
VideoMaskQueue.Reset();
}
if(video_frame && (video_frame->data_ || (video_frame->pImage && video_frame->pImage->data)))
{
if (video_frame && (video_frame->data_ || (video_frame->pImage && video_frame->pImage->data)))
{
VideoMaskQueue.Push(video_frame);
/*if (Settings::SecondSdiOutWaitNumsBack)
{
if (video_frame->flag_ == BS_END)
{
recv_back_flag = true;
recv_back_num = 0;
}
else if (video_frame->flag_ == BS_START || video_frame->flag_ == BS_RP_START_FRONT)
{
if (recv_back_num) recv_back_num = 0;
if (recv_back_flag) recv_back_flag = false;
if (VideoMaskBackQueue.Size()) VideoMaskBackQueue.Reset();
}
if (video_frame->flag_ == BS_IDEL || video_frame->flag_ == BS_RP_END_BACK)
{
if (recv_back_flag)
{
VideoMaskBackQueue.Push(video_frame);
recv_back_num++;
if (recv_back_num >= Settings::SecondSdiOutWaitNumsBack)
{
recv_back_num = 0;
recv_back_flag = false;
}
}
}
}*/
}
}
\ No newline at end of file
......@@ -99,6 +99,7 @@ void CaptureThread::AddFrame(ComPtr<IDeckLinkVideoInputFrame> videoFrame, const
}
// Slot: records the latest replay status reported by the replay pipeline.
// The qDebug trace timestamps when the status update was received.
void CaptureThread::RecvReplayStatus(const ReplayStatus& status)
{
    qDebug() << "CaptureThread::RecvReplayStatus:" << GetCurrDateTimeStr() << "\n";
    replay_status = status;
}
......
#include "Threads/ConsumerMqThread.h"
#include "Utils/MaskBuffer.h"
ConsumerMqThread::ConsumerMqThread(const std::string& queue_name, const std::string& exchange_name,
const std::string& ip, const std::string& user_id, const std::string& pwd, Listener* listerner):mq_queue_name(queue_name),
......@@ -17,11 +16,50 @@ void ConsumerMqThread::run()
{
if(setup_connection_and_channel())
{
read_msg();
//read_msg();
read_consume_msg();
}
}
void ConsumerMqThread::read_consume_msg()
{
amqp_basic_consume_ok_t* result =
amqp_basic_consume(mq_connection, channel_id,
amqp_cstring_bytes(mq_queue_name.c_str()), amqp_empty_bytes,
/*no_local*/ 0,
/*no_ack*/ 1,
/*exclusive*/ 0, amqp_empty_table);
if (!result)
{
qDebug() << "mq thread recv failed............ \n";
return;
}
while (true)
{
amqp_envelope_t envelope;
struct timeval timeout = { 100000000, 0 };
amqp_rpc_reply_t rpc_reply =
amqp_consume_message(mq_connection, &envelope, &timeout, 0);
if (rpc_reply.reply_type == AMQP_RESPONSE_NORMAL)
{
QByteArray array(QByteArray::fromRawData((char*)envelope.message.body.bytes, envelope.message.body.len));
QJsonDocument document = QJsonDocument::fromJson(array);
if (mq_listener) mq_listener->OnRecvMqMsg(document);
/*amqp_destroy_envelope(&envelope);
amqp_release_buffers(mq_connection);*/
}
/*else
{
amqp_destroy_envelope(&envelope);
}*/
amqp_destroy_envelope(&envelope);
amqp_release_buffers(mq_connection);
}
}
void ConsumerMqThread::read_msg()
{
std::string str;
......
#include "Threads/CropThread.h"
#include "Utils/yuv4k.h"
#include "Utils/Settings.h"
// Crop worker bound to a listener with an initial crop description.
// Pre-allocates the full-4K I422 staging buffer plus crop-sized I422/UYVY
// buffers (2 bytes per pixel each) when not already present.
CropThread::CropThread(Listener* listener, const CropMessage& crop_msg_)
{
    p_listener = listener;
    crop_msg = crop_msg_;

    if (i422_4k_data == nullptr)
        i422_4k_data = new uint8_t[K4WIDTH * K4HEIGHT << 1];
    if (i422_crop_data == nullptr)
        i422_crop_data = new uint8_t[crop_msg.crop_w * crop_msg.crop_h << 1];
    if (uyvy_crop_data == nullptr)
        uyvy_crop_data = new uint8_t[crop_msg.crop_w * crop_msg.crop_h << 1];
}
// Crop worker without an initial crop description; only the full-4K I422
// staging buffer is pre-allocated (crop-sized buffers are created later,
// once crop dimensions are known).
CropThread::CropThread(Listener* listener)
{
    p_listener = listener;
    if (i422_4k_data == nullptr)
        i422_4k_data = new uint8_t[K4WIDTH * K4HEIGHT << 1];
}
// Releases the working buffers. They are allocated with new[], so delete[]
// is required — plain delete on a new[] array is undefined behavior.
CropThread::~CropThread()
{
    if (i422_4k_data) delete[] i422_4k_data;
    if (i422_crop_data) delete[] i422_crop_data;
    if (uyvy_crop_data) delete[] uyvy_crop_data;
}
// Queues one frame for the crop/scale worker; null frames are ignored.
void CropThread::addVideoFrame(std::shared_ptr<VideoFrameWithMask> frame)
{
    if (!frame) return;
    crop_queue.Push(frame);
}
// Queues new crop settings for the worker loop. Settings identical to the
// current ones are dropped, so only genuine changes are enqueued.
void CropThread::addCropMsg(const CropMessage& msg)
{
    if (crop_msg.equal(msg)) return;
    crop_msg_queue.PushOnly(msg);
}
// Stub: always reports success.
// NOTE(review): no re-crop work is performed here — confirm whether this
// placeholder is intentional.
bool CropThread::reCropRes()
{
    return true;
}
// QThread entry point: runs the blocking crop/scale loop
// (CropScale loops forever and never returns).
void CropThread::run()
{
    CropScale();
}
// Blocking worker loop: pops frames from crop_queue, applies the most
// recent pending crop settings, and (for UYVY frames) crops the requested
// region and scales it back up to the full frame size in place, then
// notifies the listener with the processed frame.
void CropThread::CropScale()
{
    while (true)
    {
        std::shared_ptr<VideoFrameWithMask> p_frame = nullptr;
        if (crop_queue.WaitFor(p_frame))
        {
            // Apply the newest queued crop settings, if any.
            if (crop_msg_queue.Size())
            {
                CropMessage tmp_msg;
                crop_msg_queue.PopLast(tmp_msg);
                if (tmp_msg.crop_w && tmp_msg.crop_h)
                {
                    if (tmp_msg.crop_w != crop_msg.crop_w || tmp_msg.crop_h != crop_msg.crop_h)
                    {
                        // Reallocate crop-sized buffers when dimensions change.
                        // new[] memory must be released with delete[] — plain
                        // delete here is undefined behavior.
                        if (i422_crop_data) delete[] i422_crop_data;
                        if (uyvy_crop_data) delete[] uyvy_crop_data;
                        i422_crop_data = new uint8_t[tmp_msg.crop_w * tmp_msg.crop_h << 1];
                        uyvy_crop_data = new uint8_t[tmp_msg.crop_w * tmp_msg.crop_h << 1];
                    }
                    crop_msg = tmp_msg;
                }
            }
            if (p_frame)
            {
                uint8_t* uyvy_data = NULL;
                if (crop_msg.checkCrop())
                {
                    if (p_frame->fmt_ == bmdFormat8BitBGRA && p_frame->data_)
                    {
                        // BGRA path intentionally left unhandled here.
                    }
                    else if (p_frame->fmt_ == bmdFormat8BitYUV && (p_frame->data_ || (p_frame->pImage && p_frame->pImage->data)))
                    {
                        uyvy_data = (p_frame->data_ ? p_frame->data_ : p_frame->pImage->data);
                        // Crop region -> I422 -> scale to full size -> back to
                        // UYVY, writing the result over the source buffer.
                        Yuv4k::UYVYCopy(uyvy_data, crop_msg.crop_x, crop_msg.crop_y, p_frame->width_, p_frame->height_, uyvy_crop_data, crop_msg.crop_w, crop_msg.crop_h);
                        Yuv4k::UYVYToI422(uyvy_crop_data, i422_crop_data, crop_msg.crop_w, crop_msg.crop_h);
                        Yuv4k::I422Scale4K(i422_crop_data, crop_msg.crop_w, crop_msg.crop_h, i422_4k_data, p_frame->width_, p_frame->height_, Settings::ZoomUseOmp);
                        Yuv4k::I422ToUYVY(i422_4k_data, uyvy_data, p_frame->width_, p_frame->height_, Settings::ZoomUseOmp);
                        p_frame->crop_msg = crop_msg;
                    }
                }
                if (p_listener) p_listener->OnRecvViedoFrame(p_frame);
            }
        }
    }
}
\ No newline at end of file
#include "Threads/DecodeMaskThread.h"
// Queues one JSON mask description for decoding on the worker thread.
void DecodeMaskThread::addData(const QJsonObject& obj)
{
    queue.Push(obj);
}
// Worker loop: converts each queued JSON object into a MaskBuffer and
// emits the valid ones via PushMask.
void DecodeMaskThread::run()
{
    QJsonObject obj;
    while (true)
    {
        if (queue.WaitFor(obj))
        {
            // std::make_shared either succeeds or throws, so the buffer
            // pointer itself never needs a null check.
            auto buffer = std::make_shared<MaskBuffer>(obj);
            // Reject masks with a negative origin or a size reaching the
            // full 4K frame.
            // NOTE(review): `>=` also rejects exactly-4K-sized masks —
            // confirm intended.
            if (buffer->upper_left_point.x < 0 || buffer->upper_left_point.y < 0 || buffer->width >= K4WIDTH || buffer->height >= K4HEIGHT) continue;
            // signal == -1 marks a failed/invalid decode; skip those.
            if (buffer->signal != -1)
            {
                emit PushMask(buffer);
            }
        }
    }
}
\ No newline at end of file
......@@ -49,18 +49,33 @@ ProcessMaskThread::ProcessMaskThread()
bk_argb_attenuate = new uint8_t[K4WIDTH * K4HEIGHT << 2];
}
if (Settings::CropFlag)
{
crop_msg = CropMessage(Settings::CropX,Settings::CropY,(CropDirection)Settings::CropDirection);
if (crop_msg.crop_x || crop_msg.crop_y)
{
crop_thread = std::thread(&ProcessMaskThread::CropScale, this);
if (!crop_data)
{
crop_data = new uint8_t[crop_msg.crop_w * crop_msg.crop_h << 2];
}
}
}
//if (Settings::CropFlag)
//{
// crop_msg = CropMessage(Settings::CropX,Settings::CropY,(CropDirection)Settings::CropDirection);
// crop_thread = std::make_shared<CropThread>(this,crop_msg);
// if(crop_thread) crop_thread->start();
// //if (crop_msg.crop_x >=0 || crop_msg.crop_y >= 0)
// //{
// // crop_thread = std::thread(&ProcessMaskThread::CropScaleWithUYVY, this);
// // /*if (!crop_data)
// // {
// // crop_data = new uint8_t[crop_msg.crop_w * crop_msg.crop_h << 2];
// // }*/
// // if (!i422_4k_data) {
// // i422_4k_data = new uint8_t[K4WIDTH * K4HEIGHT << 1];
// // }
// // if (!i422_crop_data)
// // {
// // i422_crop_data = new uint8_t[crop_msg.crop_w * crop_msg.crop_h << 1];
// // }
// // if (!uyvy_crop_data)
// // {
// // uyvy_crop_data = new uint8_t[crop_msg.crop_w * crop_msg.crop_h << 1];
// // }
// //}
//}
}
ProcessMaskThread:: ~ProcessMaskThread()
{
......@@ -68,8 +83,11 @@ ProcessMaskThread:: ~ProcessMaskThread()
if (tmp_alpha) delete tmp_alpha;
if (bk_argb) delete bk_argb;
if (bk_argb_attenuate) delete bk_argb_attenuate;
if (crop_data) delete crop_data;
if (tmp_uyvy) delete tmp_uyvy;
/*if (crop_data) delete crop_data;
if (i422_4k_data) delete i422_4k_data;
if (i422_crop_data) delete i422_crop_data;
if (uyvy_crop_data) delete uyvy_crop_data;*/
}
void ProcessMaskThread::addMaskBuffer(std::shared_ptr<MaskBuffer> buffer)
......@@ -82,6 +100,7 @@ void ProcessMaskThread::addMaskBuffer(std::shared_ptr<MaskBuffer> buffer)
if (itor == mask_map.end())
{
mask_map.insert({ key,buffer });
//qDebug() << GetCurrDateTimeStr() << "recv masked and tm:" << key << ",signal:" << buffer->signal<< "\n";
}
}
......@@ -101,9 +120,24 @@ void ProcessMaskThread::AddFrame(std::shared_ptr<videoFrameData> frame_data)
}
}
// Lazily creates and starts the crop worker thread on first use.
// Returns a raw pointer to the shared CropThread; never null after the
// first successful call (std::make_shared throws rather than return null,
// so the original dead null-check branch is removed).
// NOTE(review): not guarded against concurrent first calls — confirm this
// is only invoked from one thread.
CropThread* ProcessMaskThread::GetCropThread()
{
    if (!crop_thread)
    {
        crop_thread = std::make_shared<CropThread>(this);
        crop_thread->start();
    }
    return crop_thread.get();
}
void ProcessMaskThread::ClearQueue()
{
taskImageQueue.Reset();
if(taskImageQueue.Size()) taskImageQueue.Reset();
}
void ProcessMaskThread::run()
......@@ -119,7 +153,7 @@ void ProcessMaskThread::process()
while (true)
{
std::shared_ptr<videoFrameData> image = nullptr;
if (taskImageQueue.WaitFor(min_size - 1) /*&& taskImageQueue.Size() >= min_size*/)//有横屏数据了
if (taskImageQueue.WaitFor(min_size) /*&& taskImageQueue.Size() >= min_size*/)//有横屏数据了
{
/*auto ret = taskImageQueue.Front(image);
if (ret)
......@@ -193,42 +227,97 @@ void ProcessMaskThread::blend(uint8_t* src_argb, int& src_stride_width, uint8_t*
}
void ProcessMaskThread::CropScale()
void ProcessMaskThread::OnChange()
{}
void ProcessMaskThread::OnRecvViedoFrame(std::shared_ptr<VideoFrameWithMask> frame)
{
while (true)
if (frame)
{
std::shared_ptr<VideoFrameWithMask> p_frame = nullptr;
if (crop_queue.WaitFor(p_frame))
emit PushFrame(frame);
if (!frame->replay_flag && frame->flag_ == BS_IDEL)
{
if (p_frame)
{
uint8_t* bgra_data = NULL;
if (p_frame->fmt_ == bmdFormat8BitBGRA && p_frame->data_)
{
bgra_data = p_frame->data_;
//copy to crop_data
Yuv4k::ARGBCopy4k(bgra_data, crop_msg.crop_x, crop_msg.crop_y, p_frame->width_, p_frame->height_,
crop_data, crop_msg.crop_w, crop_msg.crop_h, Settings::ZoomUseOmp, Settings::ZoomScaleType);
Yuv4k::ARGBScale4K(crop_data, crop_msg.crop_w, crop_msg.crop_h,
bgra_data, p_frame->width_, p_frame->height_, Settings::ZoomUseOmp, Settings::ZoomScaleType, Settings::ZoomScaleFilter);
}
else if (p_frame->fmt_ == bmdFormat8BitYUV && p_frame->pImage && p_frame->pImage->data)
{
bgra_data = p_frame->pImage->data;
p_frame->fmt_ = bmdFormat8BitBGRA;
Yuv4k::ARGBCopy4k(bgra_data, crop_msg.crop_x, crop_msg.crop_y, p_frame->width_, p_frame->height_,
crop_data, crop_msg.crop_w, crop_msg.crop_h, Settings::ZoomUseOmp, Settings::ZoomScaleType);
Yuv4k::ARGBScale4K(crop_data, crop_msg.crop_w, crop_msg.crop_h,
bgra_data, p_frame->width_, p_frame->height_, Settings::ZoomUseOmp, Settings::ZoomScaleType, Settings::ZoomScaleFilter);
}
emit PushFrame(p_frame);
}
emit PushFrameToReplay(frame);
}
}
}
// Dead code: the former in-class UYVY crop/scale loop. The entire body is
// commented out; cropping now happens in the separate CropThread (see
// GetCropThread). Retained for reference only — this function is a no-op.
void ProcessMaskThread::CropScaleWithUYVY()
{
    //while (true)
    //{
    //	std::shared_ptr<VideoFrameWithMask> p_frame = nullptr;
    //	if (crop_queue.WaitFor(p_frame))
    //	{
    //		if (p_frame)
    //		{
    //			uint8_t* uyvy_data = NULL;
    //			if (p_frame->fmt_ == bmdFormat8BitBGRA && p_frame->data_)
    //			{
    //				//bgra_data = p_frame->data_;
    //				////copy to crop_data
    //				//Yuv4k::ARGBCopy4k(bgra_data, crop_msg.crop_x, crop_msg.crop_y, p_frame->width_, p_frame->height_,
    //				//	crop_data, crop_msg.crop_w, crop_msg.crop_h, Settings::ZoomUseOmp, Settings::ZoomScaleType);
    //				//Yuv4k::ARGBScale4K(crop_data, crop_msg.crop_w, crop_msg.crop_h,
    //				//	bgra_data, p_frame->width_, p_frame->height_, Settings::ZoomUseOmp, Settings::ZoomScaleType, Settings::ZoomScaleFilter);
    //			}
    //			else if (p_frame->fmt_ == bmdFormat8BitYUV && (p_frame->data_ || (p_frame->pImage && p_frame->pImage->data)))
    //			{
    //				uyvy_data = (p_frame->data_ ? p_frame->data_ : p_frame->pImage->data);
    //				Yuv4k::UYVYCopy(uyvy_data, crop_msg.crop_x, crop_msg.crop_y, p_frame->width_, p_frame->height_, uyvy_crop_data, crop_msg.crop_w, crop_msg.crop_h);
    //				Yuv4k::UYVYToI422(uyvy_crop_data, i422_crop_data, crop_msg.crop_w, crop_msg.crop_h);
    //				Yuv4k::I422Scale4K(i422_crop_data, crop_msg.crop_w, crop_msg.crop_h, i422_4k_data, p_frame->width_, p_frame->height_, Settings::ZoomUseOmp);
    //				Yuv4k::I422ToUYVY(i422_4k_data, uyvy_data, p_frame->width_, p_frame->height_, Settings::ZoomUseOmp);
    //				/*Yuv4k::ARGBCopy4k(bgra_data, crop_msg.crop_x, crop_msg.crop_y, p_frame->width_, p_frame->height_,
    //					crop_data, crop_msg.crop_w, crop_msg.crop_h, Settings::ZoomUseOmp, Settings::ZoomScaleType);
    //				Yuv4k::ARGBScale4K(crop_data, crop_msg.crop_w, crop_msg.crop_h,
    //					bgra_data, p_frame->width_, p_frame->height_, Settings::ZoomUseOmp, Settings::ZoomScaleType, Settings::ZoomScaleFilter);*/
    //			}
    //			emit PushFrame(p_frame);
    //		}
    //	}
    //}
}
// Dead code: the former in-class ARGB crop/scale loop. The entire body is
// commented out; cropping now happens in the separate CropThread (see
// GetCropThread). Retained for reference only — this function is a no-op.
void ProcessMaskThread::CropScale()
{
    //while (true)
    //{
    //	std::shared_ptr<VideoFrameWithMask> p_frame = nullptr;
    //	if (crop_queue.WaitFor(p_frame))
    //	{
    //		if (p_frame)
    //		{
    //			uint8_t* bgra_data = NULL;
    //			if (p_frame->fmt_ == bmdFormat8BitBGRA && p_frame->data_)
    //			{
    //				bgra_data = p_frame->data_;
    //				//copy to crop_data
    //				Yuv4k::ARGBCopy4k(bgra_data, crop_msg.crop_x, crop_msg.crop_y, p_frame->width_, p_frame->height_,
    //					crop_data, crop_msg.crop_w, crop_msg.crop_h, Settings::ZoomUseOmp, Settings::ZoomScaleType);
    //				Yuv4k::ARGBScale4K(crop_data, crop_msg.crop_w, crop_msg.crop_h,
    //					bgra_data, p_frame->width_, p_frame->height_, Settings::ZoomUseOmp, Settings::ZoomScaleType, Settings::ZoomScaleFilter);
    //			}
    //			else if (p_frame->fmt_ == bmdFormat8BitYUV && p_frame->pImage && p_frame->pImage->data)
    //			{
    //				bgra_data = p_frame->pImage->data;
    //				p_frame->fmt_ = bmdFormat8BitBGRA;
    //				Yuv4k::ARGBCopy4k(bgra_data, crop_msg.crop_x, crop_msg.crop_y, p_frame->width_, p_frame->height_,
    //					crop_data, crop_msg.crop_w, crop_msg.crop_h, Settings::ZoomUseOmp, Settings::ZoomScaleType);
    //				Yuv4k::ARGBScale4K(crop_data, crop_msg.crop_w, crop_msg.crop_h,
    //					bgra_data, p_frame->width_, p_frame->height_, Settings::ZoomUseOmp, Settings::ZoomScaleType, Settings::ZoomScaleFilter);
    //			}
    //			emit PushFrame(p_frame);
    //		}
    //	}
    //}
}
void ProcessMaskThread::workMaskWithUYVY(const std::shared_ptr<videoFrameData>& pImage, std::shared_ptr<MaskBuffer> mask_buffer, const bool& mask_flag)
{
auto startTime = TimeMilliSecond();
......@@ -243,6 +332,7 @@ void ProcessMaskThread::workMaskWithUYVY(const std::shared_ptr<videoFrameData>&
uint32_t width = pImage->width;
uint32_t height = pImage->height;
qint64 timestamp = (pImage->replaySeq ? pImage->replaySeq : pImage->timestamp);
bool replay_flag = (pImage->replaySeq ? true : false);
uint32_t crop_width = 0;
uint32_t crop_height = 0;
......@@ -310,6 +400,20 @@ void ProcessMaskThread::workMaskWithUYVY(const std::shared_ptr<videoFrameData>&
{
if (!err)
{
if (dynamic_flag)
{
dynamic_flag = false;
status = BS_IDEL;
memset(tmp_alpha, 0, width * height);
last_masked = nullptr;
masked_map.clear();
memset(bk_argb, 0, width * height << 2);
QString msg = "{\"signal\":\"MaskMsg\",\"data\":{\"status\":" + QString::number(BS_END) + ",\"timestamp\":" + QString::number(timestamp) + "}}";
UdpSend::GetInstance().SendUdpMsg(msg, Settings::UIIpAddr, QString::number(Settings::UIUdpPort));
}
auto t1 = TimeMilliSecond();
size_t crop_size = crop_width * crop_height << 2;
uint8_t* crop_buffer_argb = new uint8_t[crop_size];
......@@ -425,6 +529,7 @@ void ProcessMaskThread::workMaskWithUYVY(const std::shared_ptr<videoFrameData>&
size_t dst_uyvu_size = width * height << 1;
uint8_t* dst_uyvy = new uint8_t[dst_uyvu_size];
size_t once_size = (width * height >> 1);
//size_t once_size = (width * height);
size_t num = dst_uyvu_size / once_size;
omp_set_num_threads(num);
#pragma omp parallel
......@@ -443,24 +548,40 @@ void ProcessMaskThread::workMaskWithUYVY(const std::shared_ptr<videoFrameData>&
if (mask_buffer) end_flag = mask_buffer->signal;
if (map_size == 1 && status == BS_IDEL)
{
/*if (end_flag == 1)
{
if (Settings::DynamicOut <= 0)
{
status = BS_END;
clear_flag = true;
}
}*/
status = BS_START;
start_time = timestamp;
QString msg = "{\"signal\":\"MaskMsg\",\"data\":{\"status\":" + QString::number(status) + ",\"timestamp\":" + QString::number(timestamp) + "}}";
UdpSend::GetInstance().SendUdpMsg(msg, Settings::UIIpAddr, QString::number(Settings::UIUdpPort));
//qDebug() << "start copy crop pic.......\n";
//qDebug() << GetCurrDateTimeStr() << " start copy crop pic," << "start tm:" << timestamp << "\n";
}
else if (end_flag == 1 && status == BS_ING)
else if (end_flag == 1 && (status == BS_ING || status == BS_START))
{
status = BS_END;
clear_flag = true;
if (status == BS_END)
//qDebug() << GetCurrDateTimeStr() << " stop copy crop pic," << "stop tm:" << timestamp << "\n";
if (Settings::DynamicOut <= 0)
{
status = BS_END;
clear_flag = true;
//send msg to ui with udp-protocol
QString msg = "{\"signal\":\"MaskMsg\",\"data\":{\"status\":" + QString::number(status) + ",\"timestamp\":" + QString::number(timestamp) + "}}";
UdpSend::GetInstance().SendUdpMsg(msg, Settings::UIIpAddr, QString::number(Settings::UIUdpPort));
}
else status = BS_ING;
}
else
{
status = BS_ING;
//qDebug() << GetCurrDateTimeStr() << " starting copy crop pic," << "frame tm:" << timestamp << "\n";
}
else status = BS_ING;
//libyuv::ARGBBlend(bk_argb_attenuate, width << 2, src_argb , width << 2, src_argb, width << 2, width, height);
uint32_t offset_b = (mask_rect.y * width << 2) + (mask_rect.x << 2);
......@@ -469,20 +590,50 @@ void ProcessMaskThread::workMaskWithUYVY(const std::shared_ptr<videoFrameData>&
libyuv::ARGBCopy(src_bgra + offset_b, width << 2, tmp_bgra, width << 2, mask_rect.width, mask_rect.height);
libyuv::ARGBBlend(bk_argb + offset_b, width << 2, tmp_bgra, width << 2, tmp_bgra, width << 2, mask_rect.width, mask_rect.height);
libyuv::ARGBToUYVY(tmp_bgra, width << 2, tmp_uyvy, width << 1, mask_rect.width, mask_rect.height);
qint32 x_ = ((mask_rect.x + 1) >> 1 << 1);
qint32 width_ = (x_ == mask_rect.x ? (mask_rect.width >> 1 << 1) : ((mask_rect.width - 1) >> 1 << 1));
qint32 height_ = (mask_rect.height);
offset_b = (x_ == mask_rect.x ? 0 : 4);
libyuv::ARGBToUYVY(tmp_bgra + offset_b, width << 2, tmp_uyvy, width << 1, width_, height_);
offset_b = (mask_rect.y * width << 1) + (x_ << 1);
libyuv::CopyPlane(tmp_uyvy, width << 1, dst_uyvy + offset_b, width << 1, width_ * 2, height_);
/*libyuv::ARGBToUYVY(tmp_bgra, width << 2, tmp_uyvy, width << 1, mask_rect.width, mask_rect.height);
offset_b = (mask_rect.y * width << 1) + (mask_rect.x << 1);
libyuv::CopyPlane(tmp_uyvy, width << 1, dst_uyvy + offset_b, width << 1, mask_rect.width, mask_rect.height);
libyuv::CopyPlane(tmp_uyvy, width << 1, dst_uyvy + offset_b, width << 1, mask_rect.width * 2, mask_rect.height);*/
if (dynamic_flag)
{
dynamic_out_num++;
if (dynamic_out_num >= Settings::DynamicOut)
{
dynamic_flag = false;
dynamic_out_num = 0;
status = BS_END;
clear_flag = true;
//send msg to ui with udp-protocol
QString msg = "{\"signal\":\"MaskMsg\",\"data\":{\"status\":" + QString::number(status) + ",\"timestamp\":" + QString::number(timestamp) + "}}";
UdpSend::GetInstance().SendUdpMsg(msg, Settings::UIIpAddr, QString::number(Settings::UIUdpPort));
}
}
if (end_flag == 1 && Settings::DynamicOut)
{
dynamic_flag = true;
}
std::shared_ptr<VideoFrameWithMask> video_frame = std::make_shared<VideoFrameWithMask>(width, height, timestamp, pImage->sequenceNum, dst_uyvy, bmdFormat8BitYUV, status, pImage->meta_, start_time);
video_frame->replay_flag = replay_flag;
//std::shared_ptr<VideoFrameWithMask> video_frame = std::make_shared<VideoFrameWithMask>(pImage, status, start_time);
if (Settings::CropFlag && (crop_msg.crop_x || crop_msg.crop_y)) crop_queue.Push(video_frame);
if (Settings::CropFlag && /*(crop_msg.crop_x >= 0 || crop_msg.crop_y >= 0) &&*/ crop_thread) crop_thread->addVideoFrame(video_frame);
else
{
emit PushFrame(video_frame);
memset(tmp_bgra, 0, width* height << 2);
memset(tmp_uyvy, 0, width* height << 1);
}
//qDebug()<<"tm:"<< video_frame->timestamp_ <<"," << GetCurrDateTimeStr() << ",process mask frame duration:" << TimeMilliSecond() - t11 << "\n";
if (clear_flag) {
......@@ -525,10 +676,10 @@ void ProcessMaskThread::workMaskWithUYVY(const std::shared_ptr<videoFrameData>&
else
{
//emit PushFrame(video_frame);
if (Settings::CropFlag && (crop_msg.crop_x || crop_msg.crop_y))
if (Settings::CropFlag && /*(crop_msg.crop_x >=0 || crop_msg.crop_y >= 0) &&*/ crop_thread)
{
//auto t2 = TimeMilliSecond();
size_t dst_argb_size = width * height << 2;
/*size_t dst_argb_size = width * height << 2;
uint8_t* dst_argb = new uint8_t[dst_argb_size];
size_t once_size = width * height;
size_t num = dst_argb_size / once_size;
......@@ -542,14 +693,32 @@ void ProcessMaskThread::workMaskWithUYVY(const std::shared_ptr<videoFrameData>&
auto src = src_bgra + i * once_size;
memcpy(dst, src, once_size);
}
}*/
size_t dst_uyvu_size = width * height << 1;
uint8_t* dst_uyvy = new uint8_t[dst_uyvu_size];
size_t once_size = (width * height);
size_t num = dst_uyvu_size / once_size;
omp_set_num_threads(num);
#pragma omp parallel
{
#pragma omp for nowait
for (int i = 0; i < num; i++)
{
auto dst = dst_uyvy + i * once_size;
auto src = src_uyvy + i * once_size;
memcpy(dst, src, once_size);
//SSE::MemcpySSE(dst, src, once_size);
}
}
std::shared_ptr<VideoFrameWithMask> video_frame_i = std::make_shared<VideoFrameWithMask>(width, height, timestamp, pImage->sequenceNum, dst_argb, status, pImage->meta_);
crop_queue.Push(video_frame_i);
std::shared_ptr<VideoFrameWithMask> video_frame_i = std::make_shared<VideoFrameWithMask>(width, height, timestamp, pImage->sequenceNum, dst_uyvy, bmdFormat8BitYUV, pImage->flag_, pImage->meta_);
//std::shared_ptr<VideoFrameWithMask> video_frame = std::make_shared<VideoFrameWithMask>(pImage, status);
if(crop_thread) crop_thread->addVideoFrame(video_frame_i);
//qDebug() << "memcpy duration:" << TimeMilliSecond() - t2 << "\n";
}
else
{
std::shared_ptr<VideoFrameWithMask> video_frame = std::make_shared<VideoFrameWithMask>(pImage, status);
std::shared_ptr<VideoFrameWithMask> video_frame = std::make_shared<VideoFrameWithMask>(pImage, pImage->flag_);
emit PushFrame(video_frame);
}
//qDebug() << "process mask frame:" << video_frame->timestamp_ << ",cur_time:" << GetCurrDateTimeStr() << "\n";
......@@ -823,11 +992,12 @@ void ProcessMaskThread::workMaskModify(const std::shared_ptr<videoFrameData>& pI
libyuv::ARGBBlend(bk_argb + offset_b, width << 2, dst_argb + offset_b, width << 2, dst_argb + offset_b, width << 2, mask_rect.width, mask_rect.height);
std::shared_ptr<VideoFrameWithMask> video_frame = std::make_shared<VideoFrameWithMask>(width, height, timestamp, pImage->sequenceNum, dst_argb, status,pImage->meta_,start_time);
if(Settings::CropFlag && (crop_msg.crop_x || crop_msg.crop_y)) crop_queue.Push(video_frame);
/*if(Settings::CropFlag && (crop_msg.crop_x >= 0 || crop_msg.crop_y >= 0)) crop_queue.Push(video_frame);
else
{
emit PushFrame(video_frame);
}
}*/
emit PushFrame(video_frame);
if (clear_flag) {
......@@ -869,33 +1039,36 @@ void ProcessMaskThread::workMaskModify(const std::shared_ptr<videoFrameData>& pI
else
{
//emit PushFrame(video_frame);
if (Settings::CropFlag && (crop_msg.crop_x || crop_msg.crop_y))
{
//auto t2 = TimeMilliSecond();
size_t dst_argb_size = width * height << 2;
uint8_t* dst_argb = new uint8_t[dst_argb_size];
size_t once_size = width * height;
size_t num = dst_argb_size / once_size;
omp_set_num_threads(num);
#pragma omp parallel
{
#pragma omp for nowait
for (int i = 0; i < num; i++)
{
auto dst = dst_argb + i * once_size;
auto src = src_bgra + i * once_size;
memcpy(dst, src, once_size);
}
}
std::shared_ptr<VideoFrameWithMask> video_frame_i = std::make_shared<VideoFrameWithMask>(width, height, timestamp, pImage->sequenceNum, dst_argb, status, pImage->meta_);
crop_queue.Push(video_frame_i);
//qDebug() << "memcpy duration:" << TimeMilliSecond() - t2 << "\n";
}
else
{
std::shared_ptr<VideoFrameWithMask> video_frame = std::make_shared<VideoFrameWithMask>(pImage, status);
emit PushFrame(video_frame);
}
//if (Settings::CropFlag && (crop_msg.crop_x >= 0 || crop_msg.crop_y >= 0))
//{
// //auto t2 = TimeMilliSecond();
// size_t dst_argb_size = width * height << 2;
// uint8_t* dst_argb = new uint8_t[dst_argb_size];
// size_t once_size = width * height;
// size_t num = dst_argb_size / once_size;
// omp_set_num_threads(num);
// #pragma omp parallel
// {
// #pragma omp for nowait
// for (int i = 0; i < num; i++)
// {
// auto dst = dst_argb + i * once_size;
// auto src = src_bgra + i * once_size;
// memcpy(dst, src, once_size);
// }
// }
// std::shared_ptr<VideoFrameWithMask> video_frame_i = std::make_shared<VideoFrameWithMask>(width, height, timestamp, pImage->sequenceNum, dst_argb, status, pImage->meta_);
// crop_queue.Push(video_frame_i);
// //qDebug() << "memcpy duration:" << TimeMilliSecond() - t2 << "\n";
//}
//else
//{
// std::shared_ptr<VideoFrameWithMask> video_frame = std::make_shared<VideoFrameWithMask>(pImage, status);
// emit PushFrame(video_frame);
//}
std::shared_ptr<VideoFrameWithMask> video_frame = std::make_shared<VideoFrameWithMask>(pImage, status);
emit PushFrame(video_frame);
//qDebug() << "process mask frame:" << video_frame->timestamp_ << ",cur_time:" << GetCurrDateTimeStr() << "\n";
}
......
......@@ -15,7 +15,7 @@ int ProcessThread::s_count = 0;
//extern int FrameRate;
//extern int FrontDeleyTime;
//extern bool HaveBlackDataFlag;
extern std::map<qint32, qint32> map_scale_mode;
//extern std::map<qint32, qint32> map_scale_mode;
RoiMessage::RoiMessage() : h(CROPHEIGHT)
......
#include "Threads/ReplayThread.h"
#include "Utils/Settings.h"
#include "Network/UdpSend.h"
#include <omp.h>
//extern int ReplayStoreTime;
//extern int FrameRate;
ReplayThread::ReplayThread()
{
max_store_size = Settings::ReplayStoreTime / 1000 * Settings::FrameRate;
max_store_size = Settings::ReplayStoreTime * Settings::FrameRate / 1000;
max_store_back_size = Settings::ReplayBackStoreTime * Settings::FrameRate / 1000;
max_store_crop_size = Settings::ReplayCropStoreTime * Settings::FrameRate / 1000;
interval = 1000 / Settings::FrameRate;
}
......@@ -16,6 +19,20 @@ ReplayThread::~ReplayThread()
}
// Buffers a cropped output frame for replay, keyed by timestamp.
// Frames are only stored while no replay is in progress (status IDEL/END);
// the map is bounded at max_store_crop_size by evicting the oldest entry.
void ReplayThread::addCropFrame(std::shared_ptr<VideoFrameWithMask> frame)
{
    // Accept only frames that actually carry pixel data.
    if (frame && (frame->data_ || frame->pImage))
    {
        if (replay_params.status == RS_IDEL || replay_params.status == RS_END)
            storeCropMap.Insert(frame->timestamp_,frame);
        if (storeCropMap.Size() > max_store_crop_size)
        {
            storeCropMap.Pop();
        }
    }
}
void ReplayThread::addFrame(std::shared_ptr<videoFrameData> frameData)
{
if(frameData && frameData->data)
......@@ -24,13 +41,24 @@ void ReplayThread::addFrame(std::shared_ptr<videoFrameData> frameData)
{
std::unique_lock<std::mutex> ulock(mutex);
current_seq = frameData->sequenceNum;
if (replay_params.status == RS_IDEL || replay_params.status == RS_END) storeVideoMap.insert({ frameData->timestamp,frameData });
if (replay_params.status == RS_IDEL || replay_params.status == RS_END)
{
storeVideoMap.insert({ frameData->timestamp,frameData });
if(storeBackVideoMap.size()) storeBackVideoMap.erase(storeBackVideoMap.begin()->first);
}
else if (replay_params.status == RS_PRE_START)
{
if (storeBackVideoMap.size() < max_store_back_size) storeBackVideoMap.insert({ frameData->timestamp,frameData });
//if (storeBackVideoMap.size() > max_store_back_size) storeBackVideoMap.erase(storeBackVideoMap.begin()->first);
}
if (storeVideoMap.size() > max_store_size)
{
storeVideoMap.erase(storeVideoMap.begin()->first);
}
if ((replay_params.status == RS_START || replay_params.status == RS_RE_START)) {
if (send_err_flag) send_err_flag = false;
//if (send_err_flag) send_err_flag = false;
cv.notify_all();
}
......@@ -40,12 +68,30 @@ void ReplayThread::addFrame(std::shared_ptr<videoFrameData> frameData)
void ReplayThread::recvReplayParams(const ReplayParams& params)
{
std::unique_lock<std::mutex> ulock(mutex);
replay_params = params;
{
//std::unique_lock<std::mutex> ulock(mutex);
replay_params.status = params.status.load();
replay_params.start_time = params.start_time;
replay_params.end_time = params.end_time;
}
//if(replay_flag != flag) replay_flag = flag;
/*if (replay_params.status == RS_END && storeBackVideoMap.size())
{
storeBackVideoMap.clear();
}*/
if((replay_params.status == RS_START || replay_params.status == RS_RE_START) && !replayVideoVec.empty())
{
if (send_err_flag) send_err_flag = false;
replay_position = 0;
replay_sdi_position = 0;
/*if (storeBackVideoMap.size())
{
if (storeBackVideoMap.begin()->first < params.start_time)
{
storeBackVideoMap.clear();
}
}*/
}
}
bool ReplayThread::CanReplay(const ReplayParams& params)
......@@ -66,85 +112,259 @@ bool ReplayThread::CanReplay(const ReplayParams& params)
}
// Lazily creates the crop worker on first use, starting both the crop
// thread and the frame-feeding thread (SendFrameFunc). Returns a raw
// pointer to the shared CropThread; never null after the first call
// (std::make_shared throws rather than return null, so the original dead
// null-check branch is removed).
// NOTE(review): not guarded against concurrent first calls — confirm this
// is only invoked from one thread.
CropThread* ReplayThread::GetCropThread()
{
    if (!crop_thread)
    {
        crop_thread = std::make_shared<CropThread>(this);
        crop_thread->start();
        send_frame_thread = std::thread(&ReplayThread::SendFrameFunc, this);
    }
    return crop_thread.get();
}
// CropThread::Listener callback: stores the cropped frame into the replay
// map (keyed by timestamp) and appends it to the SDI replay vector.
// NOTE(review): replayVideoSdiVec is appended here without visible locking
// while run() swaps/clears it — confirm the threading contract.
void ReplayThread::OnRecvViedoFrame(std::shared_ptr<VideoFrameWithMask> crop_frame)
{
    if (crop_frame)
    {
        storeCropMap.EnsureInsert(crop_frame->timestamp_, crop_frame);
        replayVideoSdiVec.push_back(crop_frame);
    }
}
// CropThread::Listener callback: crop-setting changes need no action here.
void ReplayThread::OnChange()
{
}
// Worker loop (started by GetCropThread): copies each queued frame's UYVY
// payload into a freshly allocated buffer and forwards it to the crop
// thread wrapped in a VideoFrameWithMask.
// NOTE(review): assumes VideoFrameWithMask takes ownership of dst_uyvy and
// frees it — confirm, otherwise this leaks when crop_thread is null.
void ReplayThread::SendFrameFunc()
{
    while (true)
    {
        std::shared_ptr<videoFrameData> frame = nullptr;
        if (needCropQueue.WaitFor(frame))
        {
            if (!frame) continue;
            auto width = frame->width;
            auto height = frame->height;
            // UYVY is 2 bytes per pixel.
            size_t dst_uyvu_size = width * height << 1;
            uint8_t* dst_uyvy = new uint8_t[dst_uyvu_size];
            // Split the copy into chunks of width*height bytes; with a
            // 2-bytes-per-pixel payload this yields exactly 2 chunks, each
            // copied by its own OpenMP thread.
            size_t once_size = (width * height);
            size_t num = dst_uyvu_size / once_size;
            omp_set_num_threads(num);
#pragma omp parallel
            {
#pragma omp for nowait
                for (int i = 0; i < num; i++)
                {
                    auto dst = dst_uyvy + i * once_size;
                    auto src = frame->uyvy_data + i * once_size;
                    memcpy(dst, src, once_size);
                    //SSE::MemcpySSE(dst, src, once_size);
                }
            }
            std::shared_ptr<VideoFrameWithMask> video_frame_i = std::make_shared<VideoFrameWithMask>(width, height, frame->timestamp, frame->sequenceNum, dst_uyvy, bmdFormat8BitYUV, frame->flag_, frame->meta_);
            //std::shared_ptr<VideoFrameWithMask> video_frame = std::make_shared<VideoFrameWithMask>(pImage, status);
            if (crop_thread) crop_thread->addVideoFrame(video_frame_i);
        }
    }
}
void ReplayThread::run()
{
std::shared_ptr<videoFrameData> resend_frame = nullptr;
qint32 resend_num = 0;
qint64 resend_tm = 0;
bool resend_start = false;
std::atomic_uint64_t in_tm = 0,out_tm = 0;
std::atomic_int32_t dy_in_count = Settings::ReplayForward.load(), dy_out_count = Settings::ReplayDeley;
while (true)
{
{
std::unique_lock<std::mutex> ulock(mutex);
cv.wait(ulock);
}
if (last_replay_params.start_time != replay_params.start_time ||
last_replay_params.end_time != replay_params.end_time)
if (0 == replay_position && (last_replay_params.start_time != replay_params.start_time ||
last_replay_params.end_time != replay_params.end_time ||
dy_in_count != Settings::ReplayForward ||
dy_out_count < Settings::DynamicOut))
{
/*std::queue<std::shared_ptr<videoFrameData>> empty;
if (!replayVideoQueue1.empty()) std::swap(empty, replayVideoQueue1);
if (!replayVideoQueue2.empty()) std::swap(empty, replayVideoQueue2);*/
if (!replayVideoVec.empty()) std::vector<std::shared_ptr<videoFrameData>>().swap(replayVideoVec);
if (!replayVideoSdiVec.empty()) std::vector<std::shared_ptr<VideoFrameWithMask>>().swap(replayVideoSdiVec);
if (!replayVideoCropVec.empty()) std::vector<std::shared_ptr<videoFrameData>>().swap(replayVideoCropVec);
replay_position = 0;
last_replay_params = replay_params;
replay_sdi_position = 0;
last_replay_params.status = replay_params.status.load();
last_replay_params.start_time = replay_params.start_time;
last_replay_params.end_time = replay_params.end_time;
dy_in_count = Settings::ReplayForward.load();
dy_out_count = Settings::DynamicOut.load();
Settings::ReplayDeley = dy_out_count;
}
if (replayVideoVec.empty())
{
//qint32 frame_nums = replay_params.end_time - replay_params.end_time + 1;
//auto t1 = TimeMilliSecond();
auto tm_begin = replay_params.start_time - Settings::ReplayForward;
auto tm_end = replay_params.end_time + Settings::ReplayDeley;
in_tm = replay_params.start_time;
out_tm = replay_params.end_time;
auto in_itor = storeVideoMap.find(in_tm);
auto out_itor = storeVideoMap.find(out_tm);
if (in_itor == storeVideoMap.end() || out_itor == storeVideoMap.end() || in_tm >= out_tm)
{
if (!send_err_flag)
{
send_err_flag = true;
qint32 errNo = (out_itor == storeVideoMap.end() ? 2 : (in_itor == storeVideoMap.end() ? 1 : 3));
QString msg = "{\"signal\":\"ReplayResp\",\"data\":{\"inTime\":" + QString::number(in_tm) + ",\"outTime\":" + QString::number(out_tm) +
",\"status\":0,\"errNo\":" + QString::number(errNo) + "}}";
UdpSend::GetInstance().SendUdpMsg(msg, Settings::UIIpAddr, QString::number(Settings::UIUdpPort));
qDebug() << "\n replay fail,errno:" << errNo << ",replay params in_tm:" << in_tm << ",out_tm:" << out_tm << "\n";
}
continue;
}
qint64 rp_begin = in_tm - dy_in_count;
qint64 rp_end = out_tm + dy_out_count;
auto begin_tm = storeVideoMap.begin()->first;
auto end_tm = storeVideoMap.rbegin()->first;
qint64 back_end_tm = 0,back_begin_tm = 0;
bool find_back_flag = false;
if (storeBackVideoMap.size())
{
back_begin_tm = storeBackVideoMap.begin()->first;
back_end_tm = storeBackVideoMap.rbegin()->first;
}
if (tm_begin < begin_tm) tm_begin = begin_tm;
if (tm_end > end_tm) tm_end = end_tm;
size_t size = tm_end - tm_begin + 1;
if (rp_begin < begin_tm) rp_begin = begin_tm;
if (rp_end > end_tm)
{
if (end_tm < back_begin_tm)
{
if (rp_end > back_end_tm) rp_end = back_end_tm;
find_back_flag = true;
}
else rp_end = end_tm;
}
//size_t size = tm_end - tm_begin + 1;
/*auto& tm_end = replay_params.end_time;
auto& tm_begin = replay_params.start_time;*/
auto itor_end = storeVideoMap.find(tm_end);
auto itor_begin = storeVideoMap.find(tm_begin);
auto itor_begin = storeVideoMap.lower_bound(rp_begin);
std::map<qint64, std::shared_ptr<videoFrameData>>::iterator itor_end;
if(find_back_flag)itor_end = storeBackVideoMap.lower_bound(rp_end);
else itor_end = storeVideoMap.lower_bound(rp_end);
if (itor_end == storeVideoMap.end() || itor_begin == storeVideoMap.end() || tm_end <= tm_begin)
/*if (itor_end == storeVideoMap.end() || itor_begin == storeVideoMap.end() || tm_end <= tm_begin)
{
if (!send_err_flag)
{
send_err_flag = true;
qint32 errNo = (itor_end == storeVideoMap.end() ? 2 : (itor_begin == storeVideoMap.end() ? 1 : 3));
QString msg = "{\"signal\":\"ReplayResp\",\"data\":{\"inTime\":" + QString::number(replay_params.start_time) + ",\"outTime\":" + QString::number(replay_params.end_time) +
QString msg = "{\"signal\":\"ReplayResp\",\"data\":{\"inTime\":" + QString::number(in_tm) + ",\"outTime\":" + QString::number(out_tm) +
",\"status\":0,\"errNo\":" + QString::number(errNo) + "}}";
UdpSend::GetInstance().SendUdpMsg(msg, Settings::UIIpAddr, QString::number(Settings::UIUdpPort));
qDebug() << "replay fail,errno:" << errNo << "\n";
qDebug() << "\n replay fail,errno:" << errNo<<",replay params in_tm:"<<in_tm<<",out_tm:"<<out_tm<<",store map begin tm:"<<begin_tm <<",end tm:"<<end_tm << "\n";
}
continue;
}
bool first = true;
}*/
/*bool first = true;
resend_frame = nullptr;
resend_num = (Settings::SDIOneFrameDuration > Settings::SDIOneFrameDuration ? Settings::SDIOneFrameDuration : Settings::SDIOneFrameDuration);
for (auto itor = itor_begin;; itor++)
resend_num = (Settings::SDIOneFrameDuration > Settings::NdiOneFrameDuration ? Settings::SDIOneFrameDuration : Settings::NdiOneFrameDuration);*/
auto ndi_begin_tm = in_tm - Settings::ReplayForwardForInNDI;
auto ndi_itor = storeVideoMap.lower_bound(ndi_begin_tm);
if (ndi_begin_tm > rp_begin)
{
for (auto itor = itor_begin;; itor++)
{
if (itor == in_itor)
{
break;
}
auto frame = itor->second;
//emit PushFrameForSdi(video_frame);
if (Settings::CropFlag)
{
std::shared_ptr<VideoFrameWithMask> crop_frame = nullptr;
if (storeCropMap.Find(itor->first, crop_frame))
{
if (crop_frame->crop_msg.equal(Settings::CropMsg))
{
replayVideoSdiVec.push_back(crop_frame);
}
else needCropQueue.Push(frame);
}
else
{
needCropQueue.Push(frame);
}
}
else
{
std::shared_ptr<VideoFrameWithMask> video_frame = std::make_shared<VideoFrameWithMask>(frame, frame->flag_);
video_frame->replay_flag = true;
replayVideoSdiVec.emplace_back(video_frame);
}
replayVideoCropVec.push_back(frame);
//replayVideoVec.emplace_back(itor->second);
/*if (itor->first > replay_params.end_time && first && resend_num)
{
resend_frame = itor->second;
first = false;
}*/
/*if (itor->first >= ndi_tm_begin)
{
replayVideoNdiVec.emplace_back(itor->second);
}*/
}
}
for (auto itor = ndi_itor;itor != storeVideoMap.end(); itor++)
{
replayVideoVec.emplace_back(itor->second);
if (itor->first > replay_params.end_time && first)
if (!find_back_flag)
{
resend_frame = itor->second;
first = false;
if (itor == itor_end)
{
break;
}
}
if (itor == itor_end)
}
if (find_back_flag)
{
for (auto itor = storeBackVideoMap.begin(); itor != storeBackVideoMap.end(); itor++)
{
break;
replayVideoVec.emplace_back(itor->second);
if (itor == itor_end)
{
break;
}
}
}
if (replayVideoVec.size() != size)
/*if (replayVideoVec.size() != size)
{
qDebug() << "replay find size err ..................." << "\n";
}
}*/
//qDebug() << "replay find duration:" << TimeMilliSecond() - t1 << "\n";
//replay_position = replayVideoVec.size() - 1;
......@@ -165,15 +385,45 @@ void ReplayThread::run()
replayVideoVec.emplace_back(itor_top->second);*/
}
if (!replayVideoSdiVec.empty() && replay_sdi_position < replayVideoSdiVec.size())
{
if (!replayVideoSdiVec[0]->crop_msg.equal(Settings::CropMsg))
{
std::vector<std::shared_ptr<VideoFrameWithMask>>().swap(replayVideoSdiVec);
for (auto frame : replayVideoCropVec)
{
if (Settings::CropFlag)
{
needCropQueue.Push(frame);
}
else
{
std::shared_ptr<VideoFrameWithMask> video_frame = std::make_shared<VideoFrameWithMask>(frame, frame->flag_);
video_frame->replay_flag = true;
replayVideoSdiVec.emplace_back(video_frame);
}
}
}
else
{
std::shared_ptr<VideoFrameWithMask> frame = replayVideoSdiVec[replay_sdi_position];
if (frame) emit PushFrameForSdi(frame);
replay_sdi_position++;
}
}
if (!replayVideoVec.empty())
{
std::shared_ptr<videoFrameData> frame = nullptr;
frame = (resend_frame ? (resend_start ? resend_frame : replayVideoVec[replay_position] ) : replayVideoVec[replay_position]);
//frame = (resend_frame ? (resend_start ? resend_frame : replayVideoVec[replay_position] ) : replayVideoVec[replay_position]);
frame = replayVideoVec[replay_position];
frame->replaySeq = current_seq;
//frame->flag_ = (frame->timestamp < in_tm ? BS_RP_START_FRONT : (frame->timestamp > out_tm ? BS_RP_END_BACK : BS_IDEL));
if(frame) emit PushFrame(frame);
if ((!resend_start || !resend_frame) && replay_position < (replayVideoVec.size() - 1)) replay_position++;
if (/*(!resend_start || !resend_frame) &&*/ replay_position < (replayVideoVec.size() - 1)) replay_position++;
if (resend_frame)
/*if (resend_frame)
{
if (resend_start && resend_num) resend_num--;
if (!resend_start && resend_num && frame->timestamp == resend_frame->timestamp)
......@@ -183,9 +433,9 @@ void ReplayThread::run()
if (resend_num <= 0 && resend_start)
{
resend_start = false;
resend_num = (Settings::SDIOneFrameDuration > Settings::SDIOneFrameDuration ? Settings::SDIOneFrameDuration : Settings::SDIOneFrameDuration);
resend_num = (Settings::SDIOneFrameDuration > Settings::NdiOneFrameDuration ? Settings::SDIOneFrameDuration : Settings::NdiOneFrameDuration);
}
}
}*/
//qDebug() << "replay time:" << GetCurrDateTimeStr() << ",send frame timecode:" << frame->timestamp << ",replayseq:" << frame->replaySeq << "\n";
/*if (replay_position == 0)
......
......@@ -412,8 +412,16 @@ bool ZoomThread::PrepareZoom(std::shared_ptr<VideoFrameWithMask> src_frame)
if (vec_points.size() <= 0) return false;
size_t step = src_w * src_h;
Memory::MemoryCopy4k(src_frame->data_, bgra_mat.data, step);
if (src_frame->fmt_ == bmdFormat8BitYUV)
{
libyuv::UYVYToARGB(src_frame->data_, src_w << 1, bgra_mat.data, src_w << 2, src_w, src_h);
}
else if (src_frame->fmt_ == bmdFormat8BitARGB)
{
size_t step = src_w * src_h;
Memory::MemoryCopy4k(src_frame->data_, bgra_mat.data, step);
}
uint32_t point_size = vec_points.size();
fir_point = vec_points[0];
end_point = vec_points[point_size - 1];
......
......@@ -13,6 +13,8 @@
#include "Utils/Common.h"
#include "Utils/Settings.h"
#include "Network/UdpSend.h"
#include "Utils/Computer.h"
#include "Utils/yuv4k.h"
qint64 StartTimeStamp = 0;
......@@ -20,8 +22,8 @@ std::map<qint32, qint32> map_scale_mode;
int main_ver = 2;
int mid_ver = 1;
int small_ver = 1;
int mid_ver = 2;
int small_ver = 6;
//int ReplayStoreTime = 10000;//单位ms
//int FrontDeleyTime = 3000;//单位ms
......@@ -178,6 +180,17 @@ MomentaMedia::MomentaMedia(QWidget *parent)
}
// do other things
}
file_thread = new QThread();
open_file = new COpenFile();
QPushButton* openFileBtn = ui.openFile;
open_file->moveToThread(file_thread);
connect(openFileBtn, &QPushButton::clicked, this, &MomentaMedia::ShowFile);
PicPathEdit = ui.showPathEdit;
connect(this, &MomentaMedia::StartOpenFile,open_file, &COpenFile::doOpenFile);
connect(open_file,&COpenFile::tellFilePath,this, &MomentaMedia::OnWritePath);
file_thread->start();
connect(open_file, &COpenFile::SendPicData, OutputDevicePages[0], &DeckLinkOutputPage::ForwardPicData);
show();
Setup();
......@@ -437,6 +450,12 @@ void MomentaMedia::RemoveDevice(ComPtr<IDeckLink>& deckLink)
}
}
// UI slot for the "open file" button: forwards the request to the worker
// object on file_thread via the StartOpenFile signal, so file-dialog I/O
// never runs on (or blocks) the GUI thread.
void MomentaMedia::ShowFile()
{
//if (Settings::PicData) return;
emit StartOpenFile();
}
void MomentaMedia::HaltStream(ComPtr<IDeckLinkProfile> profile)
{
ComPtr<IDeckLink> deviceToStop;
......@@ -578,6 +597,7 @@ void MomentaMedia::RequestOutputDeviceIfAvailable(DeckLinkOutputPage* page, ComP
}
page->RequestedDeviceGranted(deckLink);
ReadPic(page);
END_SLOT_TIME_COUNTER
}
......@@ -697,6 +717,11 @@ void MomentaMedia::DeleyTimeTextChanged(const QString& time)
END_SLOT_TIME_COUNTER
}
// Slot: shows the path chosen by COpenFile in the picture-path line edit.
void MomentaMedia::OnWritePath(const QString& path)
{
	if (!PicPathEdit)
		return;
	PicPathEdit->setText(path);
}
void MomentaMedia::ReadSettings()
{
QString exe_dir = QCoreApplication::applicationDirPath();
......@@ -707,7 +732,11 @@ void MomentaMedia::ReadSettings()
if (OutputDeleyTime < 1 * 1000) OutputDeleyTime = 1 * 1000;
else if (OutputDeleyTime > 20 * 1000) OutputDeleyTime = 20 * 1000;*/
Settings::ReplayStoreTime = settings.value("REPLAY_STORE_TIME","").toInt();
if (Settings::ReplayStoreTime < 0) Settings::ReplayStoreTime = 10000;
if (Settings::ReplayStoreTime <= 0) Settings::ReplayStoreTime = 10000;
Settings::ReplayBackStoreTime = settings.value("REPLAY_BACK_STORE_TIME", 3000).toInt();
// BUG FIX: the non-positive fallback previously overwrote
// Settings::ReplayStoreTime instead of the value just read.
if (Settings::ReplayBackStoreTime <= 0) Settings::ReplayBackStoreTime = 3000;
Settings::ReplayCropStoreTime = settings.value("REPLAY_STORE_CROP_TIME", 5000).toInt();
if (Settings::ReplayCropStoreTime <= 0) Settings::ReplayCropStoreTime = 5000;
Settings::FrontDeleyTime = settings.value("FRONT_DELEY_TIME", "").toInt();
if (Settings::FrontDeleyTime < 0 ) Settings::FrontDeleyTime = 1000;
......@@ -735,22 +764,22 @@ void MomentaMedia::ReadSettings()
// map_output_delay[i] = value;
//}
Settings::ScaleMode = settings.value("SCALE_MODE", "").toInt();
/*Settings::ScaleMode = settings.value("SCALE_MODE", "").toInt();
if (Settings::ScaleMode < 0) Settings::ScaleMode = 0;
else if (Settings::ScaleMode > 3) Settings::ScaleMode = 3;
for (int i = 1;i <= 4;i++)
else if (Settings::ScaleMode > 3) Settings::ScaleMode = 3;*/
/*for (int i = 1;i <= 4;i++)
{
std::string key = "SCALE_MODE_" + std::to_string(i);
int value = settings.value(QString::fromStdString(key), "").toInt();
if (value < 0) value = 0;
else if (value > 3) value = 3;
map_scale_mode[i] = value;
}
}*/
Settings::AspecNum = settings.value("ASPEC_NUM", "").toInt();
/*Settings::AspecNum = settings.value("ASPEC_NUM", "").toInt();
Settings::AspecDen = settings.value("ASPEC_DEN", "").toInt();
auto selectText = std::to_string(Settings::AspecNum) + ":" + std::to_string(Settings::AspecDen);
if (selectText == STR16B9) Settings::HaveBlackDataFlag = false;
if (selectText == STR16B9) Settings::HaveBlackDataFlag = false;*/
//qDebug() << "deleyTime=" << deleyTime << endl;
Settings::DrawFlag = settings.value("ZOOM_DRAW", "").toInt();
Settings::ZoomFlag = settings.value("ZOOM_FLAG", "").toInt();
......@@ -767,30 +796,79 @@ void MomentaMedia::ReadSettings()
Settings::ZoomMoveWaitCnt = settings.value("ZOOM_MOVE_WAIT", "25").toInt();
Settings::ZoomOutWaitCnt = settings.value("ZOOM_OUT_WAIT", "25").toInt();
Settings::OneFrameDuration = settings.value("NDIONEFRAMEDURATION").toInt();
Settings::NdiOneFrameDuration = settings.value("NDIONEFRAMEDURATION").toInt();
Settings::RecordStoreDuration = settings.value("RECORD_STORE_TIME").toInt();
Settings::RecordFlag = settings.value("RECORDFLAG").toInt();
Settings::SDIOneFrameDuration = settings.value("SDIONEFRAMEDURATION").toInt();
Settings::DynamicOut = settings.value("DYNAMIC_OUT",50).toInt();
Settings::OpenOMP = settings.value("OPENOMP").toInt();
Settings::TimeoutFrames = settings.value("TIMEOUTFRAMES").toInt();
if (Settings::TimeoutFrames <= 0) Settings::TimeoutFrames = 50;
if (Settings::TimeoutFrames <= 0) Settings::TimeoutFrames = 100;
Settings::SecondSdiOutWaitNums = settings.value("SECONDDELEYSDINUMS",100).toInt();
Settings::SecondFlag = settings.value("SECONDFLAG", 0).toInt();
if (Settings::SecondFlag < 0) Settings::SecondFlag = 0;
Settings::SecondSdiOutWaitNums = settings.value("SECONDSDIWAITNUM", 1).toInt();
if (Settings::SecondSdiOutWaitNums <= 0) Settings::SecondSdiOutWaitNums = 1;
Settings::SecondSdiOutWaitNumsFront = settings.value("SECONDDELEYSDINUMSFRONT",50).toInt();
if (Settings::SecondSdiOutWaitNumsFront <= 0) Settings::SecondSdiOutWaitNumsFront = 1;
Settings::SecondSdiOutWaitNumsBack = settings.value("SECONDDELEYSDINUMSBACK", 50).toInt();
if (Settings::SecondSdiOutWaitNumsBack < 0) Settings::SecondSdiOutWaitNumsBack = 0;
Settings::ReplayForward = settings.value("REPLAY_START_FORWARD", 5).toInt();
Settings::ReplayDeley = settings.value("REPLAY_END_DELEY", 5).toInt();
Settings::ReplayForwardForInNDI = settings.value("REPLAY_IN_NDI_FORWARD", 1).toInt();
Settings::RTSDIOutputWaitNums = settings.value("RTSDIOUTPUTWAITNUMS", 5).toInt();
Settings::CropFlag = settings.value("CROPRECORD", 0).toInt();
//Settings::CropFlag = settings.value("CROPRECORD", 0).toInt();
Settings::CropX = settings.value("CROP_X", 0).toInt();
Settings::CropY = settings.value("CROP_Y", 0).toInt();
Settings::CropDirection = settings.value("CROP_DIRECTION",1).toInt();
Settings::UIUdpPort = settings.value("UI_UDP_PORT", 8100).toInt();
Settings::UIIpAddr = settings.value("UI_IP_ADDR", "127.0.0.1").toString();
Settings::UIIpAddr = Computer::GetIPv4();
if(Settings::UIIpAddr == "") Settings::UIIpAddr = settings.value("UI_IP_ADDR", "127.0.0.1").toString();
Settings::MaxDynamicOut = settings.value("MAXDYNAMICOUT", 150).toInt();
Settings::MaxDynamicIn = settings.value("MAXDYNAMICOUT", 150).toInt();
Settings::MaxStaticFrame = settings.value("MAXSTATICFRAME",150).toInt();
//Settings::UsePicFlag = settings.value("USE_PIC_FLAG", 0).toInt();
settings.endGroup();
}
// Loads the default overlay picture (<app dir>/pic/frame.png), converts it
// from BGR to BGRA and then to 8-bit UYVY, and forwards the resulting frame
// to the given output page.
// Silently returns when the file is missing, unreadable, or not exactly
// K4WIDTH x K4HEIGHT.
void MomentaMedia::ReadPic(DeckLinkOutputPage* page)
{
	auto path = QCoreApplication::applicationDirPath() + "/pic/frame.png";
	if (!QFile::exists(path)) return;

	std::string str_path = path.toLocal8Bit();
	auto bk_mat = cv::imread(str_path, cv::IMREAD_COLOR);
	if (bk_mat.data == nullptr)
	{
		qDebug() << "read pic fail........." << "\n";
		return;
	}
	// Only full-resolution pictures are accepted.
	if (bk_mat.cols != K4WIDTH || bk_mat.rows != K4HEIGHT) return;
	// Guard against a not-yet-initialized UI pointer (consistent with
	// the null check in OnWritePath). Previous code dereferenced blindly.
	if (PicPathEdit) PicPathEdit->setText(path);
	//cv::cvtColor(bk_mat, bk_mat, cv::COLOR_RGB2RGBA);
	// UYVY needs 2 bytes per pixel. (Removed the unused local `size`.)
	uint8_t* data = new uint8_t[K4WIDTH * K4HEIGHT << 1];
	cv::cvtColor(bk_mat, bk_mat, cv::COLOR_BGR2BGRA);
	// NOTE(review): size/stride arguments mirror the original call — verify
	// against Yuv4k::ARGBToUYVY4K's signature before changing them.
	Yuv4k::ARGBToUYVY4K(bk_mat.data, K4WIDTH * K4HEIGHT, data, K4WIDTH * K4HEIGHT >> 1, K4WIDTH >> 1, K4HEIGHT >> 1);
	// Ownership of `data` is assumed to transfer to VideoFrameWithMask —
	// TODO confirm its destructor releases the buffer.
	std::shared_ptr<VideoFrameWithMask> frame = std::make_shared<VideoFrameWithMask>(K4WIDTH, K4HEIGHT, data, bmdFormat8BitYUV, BS_IDEL, kDefaultHLGBT2020HDRMetadata);
	if (page)
	{
		page->ForwardPicData(frame);
	}
}
\ No newline at end of file
#include "Utils/Computer.h"
#include <qhostinfo.h>
// Resolves the local host name and collects its IPv4 addresses.
// Returns the address string only when exactly one IPv4 address exists;
// otherwise returns an empty string so the caller can fall back to the
// configured UI_IP_ADDR value.
QString Computer::GetIPv4()
{
	const QHostInfo info = QHostInfo::fromName(QHostInfo::localHostName());
	QList<QHostAddress> v4;
	for (const QHostAddress& addr : info.addresses())
	{
		if (addr.protocol() == QAbstractSocket::IPv4Protocol)
			v4.append(addr);
	}
	// Ambiguous (several NICs) or none found -> empty string.
	return (v4.size() == 1) ? v4[0].toString() : QString("");
}
\ No newline at end of file
#include "Utils/Settings.h"
int32_t Settings::ReplayStoreTime = 10000;
int32_t Settings::ReplayBackStoreTime = 3000;
int32_t Settings::ReplayCropStoreTime = 5000;
int32_t Settings::FrontDeleyTime = 3000;
int32_t Settings::FrameRate = 50;
int32_t Settings::OutputPlayMode = 0;
int32_t Settings::AudioChannel = 2;
int32_t Settings::ScaleMode = 0;
int32_t Settings::AspecNum = 0;
int32_t Settings::AspecDen = 0;
//int32_t Settings::ScaleMode = 0;
int32_t Settings::AspecNum = 16;
int32_t Settings::AspecDen = 9;
std::atomic_int32_t Settings::NdiOneFrameDuration = 50;
std::atomic_int32_t Settings::SDIOneFrameDuration = 50;
std::atomic_int32_t Settings::DynamicOut = 50;//DYNAMIC OUT
int32_t Settings::MaxDynamicOut = 150;
int32_t Settings::MaxDynamicIn = 150;
int32_t Settings::MaxStaticFrame = 150;
int32_t Settings::OneFrameDuration = 50;
int32_t Settings::SDIOneFrameDuration = 50;
int32_t Settings::RecordStoreDuration = 10000;
int32_t Settings::RecordFlag = 0;
int32_t Settings::OpenOMP = 1;
int32_t Settings::TimeoutFrames = 50;
int32_t Settings::SecondSdiOutWaitNums = 100;
int32_t Settings::TimeoutFrames = 100;
int32_t Settings::SecondFlag = 0;
int32_t Settings::SecondSdiOutWaitNums = 1;
int32_t Settings::SecondSdiOutWaitNumsFront = 50;
int32_t Settings::SecondSdiOutWaitNumsBack = 50;
bool Settings::HaveBlackDataFlag = false;
int32_t Settings::DrawFlag = 0;
......@@ -34,15 +46,22 @@ uint32_t Settings::ZoomInWaitCnt = 0;
uint32_t Settings::ZoomMoveWaitCnt = 0;
uint32_t Settings::ZoomOutWaitCnt = 0;
uint32_t Settings::ReplayForward = 0;
uint32_t Settings::ReplayDeley = 0;
std::atomic_int32_t Settings::ReplayForward = 0;
int32_t Settings::ReplayDeley = 0;
int32_t Settings::ReplayForwardForInNDI = 0;
uint32_t Settings::RTSDIOutputWaitNums = 0;
int32_t Settings::CropFlag = 0;
std::atomic_bool Settings::CropFlag = false;
int32_t Settings::CropX = 0;
int32_t Settings::CropY = 0;
int32_t Settings::CropDirection = 1;
int32_t Settings::UIUdpPort = 8100;
QString Settings::UIIpAddr = "127.0.0.1";
\ No newline at end of file
QString Settings::UIIpAddr = "127.0.0.1";
std::atomic_bool Settings::UsePicFlag = false;
QString Settings::PicPath = "";
CropMessage Settings::CropMsg = {0,0,K4WIDTH,K4HEIGHT};
//uint8_t* Settings::PicData = NULL;
//std::atomic_bool Settings::PicFlag = false;
......@@ -81,11 +81,12 @@ int main(int argc, char *argv[])
qRegisterMetaType<MaskBuffer>("MaskBuffer");
qRegisterMetaType<MaskBuffer>("MaskBuffer&"); //
qRegisterMetaType<std::shared_ptr<MaskBuffer>>("std::shared_ptr<MaskBuffer>");
qRegisterMetaType<std::shared_ptr<PicFrameData>>("std::shared_ptr<PicFrameData>");
qRegisterMetaType<SportAttribute>("SportAttribute");
qRegisterMetaType<SportAttribute>("SportAttribute&"); //
qRegisterMetaType<std::shared_ptr<SportAttribute>>("std::shared_ptr<SportAttribute>");
qRegisterMetaType<ReplayParams>("ReplayParams");
qRegisterMetaType<ReplayParams>("ReplayParams&");
/*qRegisterMetaType<ReplayParams>("ReplayParams");
qRegisterMetaType<ReplayParams>("ReplayParams&");*/
/*FILE* fp = fopen("D:/png/1.txt", "rb");
if(fp)
{
......
No preview for this file type
No preview for this file type
......@@ -3,15 +3,53 @@ objct name changed "deviceOutputPage4"
available device "DeckLink 8K Pro (1)"
available device "DeckLink 8K Pro (2)"
available device "DeckLink 8K Pro (3)"
"2024-05-27 15:23:37.946" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
available device "DeckLink 8K Pro (4)"
"2024-05-27 15:23:37.966" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-27 15:23:38.009" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-27 15:23:38.029" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-27 15:23:38.049" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-27 15:23:38.069" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-27 15:23:38.089" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-27 15:23:38.109" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-27 15:23:38.129" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-27 15:23:38.187" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-05-27 15:23:39.211" decklink input fps 50
"2024-06-12 16:29:10.935" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-06-12 16:29:10.983" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-06-12 16:29:11.003" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-06-12 16:29:11.023" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-06-12 16:29:11.043" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-06-12 16:29:11.063" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-06-12 16:29:11.083" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-06-12 16:29:11.103" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
"2024-06-12 16:29:11.160" index: 0 DeckLinkInputDevice get video frame No input source 80000000 ------------
QObject: Cannot create children for a parent that is in a different thread.
(Parent is QUdpSocket(0x2825ed451b0), parent's thread is QThread(0x28259259b50), current thread is UdpSend(0x7ff656f050c0)"2024-06-12 16:30:03.192" decklink output fps 13 , qsize 0
"2024-06-12 16:30:40.885" decklink output fps 29 , qsize 4
"2024-06-12 16:30:41.992" decklink output fps 11 , qsize 0
"2024-06-12 16:30:46.032" decklink output fps 36 , qsize 0
"2024-06-12 16:32:45.225" decklink output fps 25 , qsize 3
"2024-06-12 16:33:23.372" decklink output fps 6 , qsize 0
"2024-06-12 16:33:31.893" decklink output fps 23 , qsize 0
"2024-06-12 16:33:48.573" decklink output fps 30 , qsize 0
"2024-06-12 16:34:03.173" decklink output fps 3 , qsize 0
"2024-06-12 16:34:08.695" decklink output fps 24 , qsize 0
"2024-06-12 16:34:12.775" decklink output fps 26 , qsize 0
"2024-06-12 16:34:17.473" decklink output fps 35 , qsize 0
"2024-06-12 16:34:36.894" decklink output fps 14 , qsize 0
"2024-06-12 16:35:17.394" decklink output fps 40 , qsize 0
"2024-06-12 16:35:37.994" decklink output fps 9 , qsize 0
"2024-06-12 16:35:59.014" decklink output fps 23 , qsize 0
"2024-06-12 16:36:19.415" decklink output fps 38 , qsize 0
"2024-06-12 16:36:39.555" decklink output fps 25 , qsize 0
"2024-06-12 16:37:00.955" decklink output fps 44 , qsize 0
"2024-06-12 16:37:22.235" decklink output fps 44 , qsize 0
"2024-06-12 16:37:43.354" decklink output fps 23 , qsize 0
"2024-06-12 16:38:02.515" decklink output fps 34 , qsize 0
"2024-06-12 16:38:24.755" decklink output fps 32 , qsize 0
"2024-06-12 16:38:46.095" decklink output fps 37 , qsize 0
"2024-06-12 16:39:06.935" decklink output fps 17 , qsize 0
"2024-06-12 16:39:27.375" decklink output fps 41 , qsize 0
"2024-06-12 16:39:48.575" decklink output fps 29 , qsize 0
"2024-06-12 16:40:08.535" decklink output fps 25 , qsize 0
"2024-06-12 16:40:28.415" decklink output fps 22 , qsize 0
"2024-06-12 16:40:48.575" decklink output fps 29 , qsize 0
"2024-06-12 16:41:10.816" decklink output fps 32 , qsize 0
"2024-06-12 16:41:32.096" decklink output fps 31 , qsize 0
"2024-06-12 16:41:52.896" decklink output fps 11 , qsize 0
"2024-06-12 16:42:15.776" decklink output fps 11 , qsize 0
"2024-06-12 16:42:37.857" decklink output fps 22 , qsize 0
"2024-06-12 16:42:59.798" decklink output fps 17 , qsize 0
"2024-06-12 16:43:19.877" decklink output fps 21 , qsize 0
"2024-06-12 16:43:39.537" decklink output fps 21 , qsize 0
"2024-06-12 16:43:59.917" decklink output fps 41 , qsize 0
[DELEYTIME]
REPLAY_STORE_TIME=10000
REPLAY_BACK_STORE_TIME=5000
REPLAY_STORE_CROP_TIME=5000
RECORD_STORE_TIME=10000
FRONT_DELEY_TIME=1000
FRAME_RATE=50
PRVW_FLAG=0
NDIONEFRAMEDURATION=50
SDIONEFRAMEDURATION=50
DYNAMIC_OUT=50
RECORDFLAG=0
OPENOMP=1
TIMEOUTFRAMES=250
SECONDFLAG=1
SECONDSDIWAITNUM=1
SECONDDELEYSDINUMSFRONT=50
SECONDDELEYSDINUMSBACK=0
REPLAY_START_FORWARD=50
REPLAY_END_DELEY=150
REPLAY_IN_NDI_FORWARD=5
MAXDYNAMICIN=150
MAXDYNAMICOUT=150
MAXSTATICFRAME=150
RTSDIOUTPUTWAITNUMS=1
UI_UDP_PORT=8100
UI_IP_ADDR=0.0.0.0
CROPRECORD=0
CROP_X=3200
CROP_Y=1800
CROP_DIRECTION=4
USE_PIC_FLAG=1
OUTPUT_PLAY_MODE=0
BLACK_BOTTOM_HEIGHT=0
AUDIO_CHANNEL=2
ASPEC_DEN=9
ASPEC_NUM=16
SCALE_MODE=0
SCALE_MODE_1=0
SCALE_MODE_2=0
SCALE_MODE_3=0
SCALE_MODE_4=0
DELEY_TIME=2000
DELEY_TIME_1=5000
DELEY_TIME_2=14000
DELEY_TIME_3=5000
DELEY_TIME_4=6000
BLACK_BOTTOM_HEIGHT_1=16
BLACK_BOTTOM_HEIGHT_2=20
BLACK_BOTTOM_HEIGHT_3=20
BLACK_BOTTOM_HEIGHT_4=30
ZOOM_DRAW=0
ZOOM_FLAG=0
ZOOM_SCALE=1.5
......@@ -37,18 +47,3 @@ ZOOM_OUT_WAIT=15
ZOOM_USE_OMP=1
ZOOM_SCALE_TYPE=2
ZOOM_SCALE_FILTER=3
NDIONEFRAMEDURATION=50
SDIONEFRAMEDURATION=50
RECORDFLAG=1
OPENOMP=1
TIMEOUTFRAMES=250
SECONDDELEYSDINUMS=0
REPLAY_START_FORWARD=50
REPLAY_END_DELEY=50
RTSDIOUTPUTWAITNUMS=0
CROPRECORD=0
CROP_X=400
CROP_Y=1800
CROP_DIRECTION=3
UI_UDP_PORT=8100
UI_IP_ADDR=127.0.0.1
回放模式缓存大小 单位毫秒 缓存帧数= REPLAY_STORE_TIME * FRAME_RATE / 1000
回放模式缓存大小 单位毫秒 缓存帧数= REPLAY_STORE_TIME * FRAME_RATE / 1000
REPLAY_STORE_TIME=10000
开启回放模式后的缓存数据大小 目前是能够找到完整的动出帧 单位ms
REPLAY_BACK_STORE_TIME=3000
如果存在裁切输出情况下,缓存裁切之后视频帧 单位ms 缓存的是未贴图的裁切视频数据
REPLAY_STORE_CROP_TIME=5000
缓存贴图操作的帧数 目的是等算法的mask的图 单位ms
FRONT_DELEY_TIME=1000
数据流的帧数
FRAME_RATE=50
NDI输出的贴图静帧帧数
NDIONEFRAMEDURATION=50
SDI输出的贴图静帧帧数
SDIONEFRAMEDURATION=50
动出的帧数
DYNAMIC_OUT=50
打开omp优化开关 默认打开
OPENOMP=1
等待算法发送的mask图的超时时间 针对如果未收到算法给的结束帧情况使用
TIMEOUTFRAMES=250
二次确认的开关 默认打开
SECONDFLAG=1
二次确认逻辑的缓存队列的最小数 默认为1 不用改
SECONDSDIWAITNUM=1
二次确认逻辑发送给sdi贴图前的帧数 主要用于自动模式
SECONDDELEYSDINUMSFRONT=50
二次确认逻辑发送给sdi贴图后的帧数 已废弃,不再使用
SECONDDELEYSDINUMSBACK=0
回放模式动态进入的帧数
REPLAY_START_FORWARD=50
回放模式Out点之后要输出的帧数 此参数要大于等于DYNAMIC_OUT数值
REPLAY_END_DELEY=50
回放模式In点之前要输出的帧数
REPLAY_IN_NDI_FORWARD=5
最大动进的帧数
MAXDYNAMICIN=150
最大动出的帧数
MAXDYNAMICOUT=150
实时模式开关 默认为1 不用改 如果想打开此模式 只需要关闭二次确认模式即可 即SECONDFLAG=0
RTSDIOUTPUTWAITNUMS=1
UI界面的udp端口号 默认值 不用改
UI_UDP_PORT=8100
本机的ipv4地址 目前系统能够自动获取本地ipv4地址 如果存在多个ipv4地址 需要手动选择一个正确的再次配置即可
UI_IP_ADDR=0.0.0.0
/-----------目前用不到的参数保持不动即可-----------/
RECORDFLAG=1
RECORD_STORE_TIME=10000
CROPRECORD=0
CROP_X=3200
CROP_Y=1800
CROP_DIRECTION=4
USE_PIC_FLAG=1
OUTPUT_PLAY_MODE=0
AUDIO_CHANNEL=2
ASPEC_DEN=9
ASPEC_NUM=16
ZOOM_DRAW=0
ZOOM_FLAG=0
ZOOM_SCALE=1.5
ZOOM_SCALE_N=1
ZOOM_SCALE_D=2
ZOOM_IN_D=2
ZOOM_MOVE_D=2
ZOOM_OUT_D=2
ZOOM_IN_WAIT=15
ZOOM_MOVE_WAIT=15
ZOOM_OUT_WAIT=15
ZOOM_USE_OMP=1
ZOOM_SCALE_TYPE=2
ZOOM_SCALE_FILTER=3
\ No newline at end of file
[DELEYTIME]
REPLAY_STORE_TIME=10000
REPLAY_BACK_STORE_TIME=5000
REPLAY_STORE_CROP_TIME=5000
RECORD_STORE_TIME=10000
FRONT_DELEY_TIME=1000
FRAME_RATE=50
PRVW_FLAG=0
NDIONEFRAMEDURATION=50
SDIONEFRAMEDURATION=50
DYNAMIC_OUT=50
RECORDFLAG=0
OPENOMP=1
TIMEOUTFRAMES=250
SECONDFLAG=1
SECONDSDIWAITNUM=1
SECONDDELEYSDINUMSFRONT=50
SECONDDELEYSDINUMSBACK=0
REPLAY_START_FORWARD=50
REPLAY_END_DELEY=150
REPLAY_IN_NDI_FORWARD=5
MAXDYNAMICIN=150
MAXDYNAMICOUT=150
MAXSTATICFRAME=150
RTSDIOUTPUTWAITNUMS=1
UI_UDP_PORT=8100
UI_IP_ADDR=0.0.0.0
CROPRECORD=0
CROP_X=3200
CROP_Y=1800
CROP_DIRECTION=4
USE_PIC_FLAG=1
OUTPUT_PLAY_MODE=0
BLACK_BOTTOM_HEIGHT=0
AUDIO_CHANNEL=2
ASPEC_DEN=9
ASPEC_NUM=16
SCALE_MODE=0
SCALE_MODE_1=0
SCALE_MODE_2=0
SCALE_MODE_3=0
SCALE_MODE_4=0
DELEY_TIME=2000
DELEY_TIME_1=5000
DELEY_TIME_2=14000
DELEY_TIME_3=5000
DELEY_TIME_4=6000
BLACK_BOTTOM_HEIGHT_1=16
BLACK_BOTTOM_HEIGHT_2=20
BLACK_BOTTOM_HEIGHT_3=20
BLACK_BOTTOM_HEIGHT_4=30
ZOOM_DRAW=0
ZOOM_FLAG=1
ZOOM_FLAG=0
ZOOM_SCALE=1.5
ZOOM_SCALE_N=1
ZOOM_SCALE_D=2
......@@ -37,18 +47,3 @@ ZOOM_OUT_WAIT=15
ZOOM_USE_OMP=1
ZOOM_SCALE_TYPE=2
ZOOM_SCALE_FILTER=3
NDIONEFRAMEDURATION=50
SDIONEFRAMEDURATION=50
RECORDFLAG=1
OPENOMP=1
TIMEOUTFRAMES=250
SECONDDELEYSDINUMS=100
REPLAY_START_FORWARD=50
REPLAY_END_DELEY=50
RTSDIOUTPUTWAITNUMS=5
CROPRECORD=0
CROP_X=400
CROP_Y=400
CROP_DIRECTION=1
UI_UDP_PORT=8100
UI_IP_ADDR=127.0.0.1
回放模式缓存大小 单位毫秒 缓存帧数= REPLAY_STORE_TIME * FRAME_RATE / 1000
回放模式缓存大小 单位毫秒 缓存帧数= REPLAY_STORE_TIME * FRAME_RATE / 1000
REPLAY_STORE_TIME=10000
开启回放模式后的缓存数据大小 目前是能够找到完整的动出帧 单位ms
REPLAY_BACK_STORE_TIME=3000
如果存在裁切输出情况下,缓存裁切之后视频帧 单位ms 缓存的是未贴图的裁切视频数据
REPLAY_STORE_CROP_TIME=5000
缓存贴图操作的帧数 目的是等算法的mask的图 单位ms
FRONT_DELEY_TIME=1000
数据流的帧数
FRAME_RATE=50
NDI输出的贴图静帧帧数
NDIONEFRAMEDURATION=50
SDI输出的贴图静帧帧数
SDIONEFRAMEDURATION=50
动出的帧数
DYNAMIC_OUT=50
打开omp优化开关 默认打开
OPENOMP=1
等待算法发送的mask图的超时时间 针对如果未收到算法给的结束帧情况使用
TIMEOUTFRAMES=250
二次确认的开关 默认打开
SECONDFLAG=1
二次确认逻辑的缓存队列的最小数 默认为1 不用改
SECONDSDIWAITNUM=1
二次确认逻辑发送给sdi贴图前的帧数 主要用于自动模式
SECONDDELEYSDINUMSFRONT=50
二次确认逻辑发送给sdi贴图后的帧数 已不用废弃
SECONDDELEYSDINUMSBACK=0
回放模式动态进入的帧数
REPLAY_START_FORWARD=50
回放模式Out点之后要输出的帧数 此参数要大于等于DYNAMIC_OUT数值
REPLAY_END_DELEY=50
回放模式In点之前要输出的帧数
REPLAY_IN_NDI_FORWARD=5
最大动进的帧数
MAXDYNAMICIN=150
最大动出的帧数
MAXDYNAMICOUT=150
实时模式开关 默认为1 不用改 如果想打开此模式 只需要关闭二次确认模式即可 即SECONDFLAG=0
RTSDIOUTPUTWAITNUMS=1
UI界面的udp端口号 默认值 不用改
UI_UDP_PORT=8100
本机的ipv4地址 目前系统能够自动获取本地ipv4地址 如果存在多个ipv4地址 需要手动选择一个正确的再次配置即可
UI_IP_ADDR=0.0.0.0
/-----------目前用不到的参数保持不动即可-----------/
RECORDFLAG=1
RECORD_STORE_TIME=10000
CROPRECORD=0
CROP_X=3200
CROP_Y=1800
CROP_DIRECTION=4
USE_PIC_FLAG=1
OUTPUT_PLAY_MODE=0
AUDIO_CHANNEL=2
ASPEC_DEN=9
ASPEC_NUM=16
ZOOM_DRAW=0
ZOOM_FLAG=0
ZOOM_SCALE=1.5
ZOOM_SCALE_N=1
ZOOM_SCALE_D=2
ZOOM_IN_D=2
ZOOM_MOVE_D=2
ZOOM_OUT_D=2
ZOOM_IN_WAIT=15
ZOOM_MOVE_WAIT=15
ZOOM_OUT_WAIT=15
ZOOM_USE_OMP=1
ZOOM_SCALE_TYPE=2
ZOOM_SCALE_FILTER=3
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment