Commit 9a1411ee by “liusq”

Add MQTT and scheduler logging

parent 381449c0
#include "AlgorithmTaskManage.h" #include "AlgorithmTaskManage.h"
AlgorithmTaskManage::AlgorithmTaskManage():isShuttingDown(false){
}
void AlgorithmTaskManage::initialize(int humanDetectionLen, int licensePlateLen, int faceLen,bool first,__uint8_t algorithmPermissions){
if(first){
this->humanDetectionLen=humanDetectionLen;
this->licensePlateLen=licensePlateLen;
this->faceLen=faceLen;
semaphore =new QSemaphore(humanDetectionLen);
plateSemaphore =new QSemaphore(licensePlateLen);
faceSemaphore = new QSemaphore(faceLen);
}else{
Common & instace= Common::getInstance();
if (algorithmPermissions == 0x00) {
this->humanDetectionLen=humanDetectionLen;
if(semaphore!=nullptr){
instace.deleteObj(semaphore);
}
semaphore =new QSemaphore(humanDetectionLen);
}
if(algorithmPermissions == 0x01){
this->licensePlateLen=licensePlateLen;
if(plateSemaphore!=nullptr){
instace.deleteObj(plateSemaphore);
}
plateSemaphore =new QSemaphore(licensePlateLen);
}
if(algorithmPermissions ==0x02){
this->faceLen=faceLen;
if(faceSemaphore!=nullptr){
instace.deleteObj(faceSemaphore);
}
faceSemaphore = new QSemaphore(faceLen);
}
}
}
void AlgorithmTaskManage::initHumanDetectionManage(const QString &modelPaths,
float carShapeConfidence,int &uniformColor) {
for (int i = 0; i < humanDetectionLen; ++i) {
HumanDetection* human=new HumanDetection(modelPaths,carShapeConfidence);
human->setHuManParameter(uniformColor);
humanDetections.emplace_back(human);
}
}
...@@ -34,6 +67,9 @@ void AlgorithmTaskManage::initFaceReconitionHandle(std::map<QString,QString>&map
}
void AlgorithmTaskManage::modifyImageFeature(std::map<QString,QString>&maps,int numberFaces,float confidence,bool isNull){
if (isShuttingDown.load(std::memory_order_acquire)) {
return ;
}
std::lock_guard<std::mutex> lock(mtxFace);
for (FaceReconitionHandle* face : faceReconitionHandles) {
face->setImageChanged(true);
...@@ -59,6 +95,44 @@ AlgorithmTaskManage::~AlgorithmTaskManage(){
}
}
void AlgorithmTaskManage::releaseResources(
int newHumanDetectionLen, int newLicensePlateLen, int newFaceLen,const QString &odelPaths,
float humanCarShapeConfidence,
int uniformColor,
float licensePlateCarConfidence,
std::map<QString,QString>& faceMaps,
int numberFaces,
float faceConfidence,
__uint8_t algorithmPermissions) {
Common & instance = Common::getInstance();
isShuttingDown.store(true, std::memory_order_release);
ScopeSemaphoreExit guard([this]() {
isShuttingDown.store(false, std::memory_order_release);
});
qInfo()<<"修改参数:releaseResources "<<algorithmPermissions;
// Uniform (work-clothes) detection algorithm parameter update
if ((algorithmPermissions & 0x01 << 2) != 0) {
resetSemaphoreAndClearObjects(instance,semaphore, humanDetections, humanDetectionLen);
initialize(newHumanDetectionLen, newLicensePlateLen, newFaceLen, false, 0x00);
initHumanDetectionManage(odelPaths, humanCarShapeConfidence, uniformColor);
}
// Face recognition algorithm parameter update
if ((algorithmPermissions & 0x01 << 1) != 0) {
resetSemaphoreAndClearObjects(instance,faceSemaphore, faceReconitionHandles, faceLen);
initialize(newHumanDetectionLen, newLicensePlateLen, newFaceLen, false, 0x02);
initFaceReconitionHandle(faceMaps, numberFaces, faceConfidence);
}
// License plate algorithm parameter update
if ((algorithmPermissions & 0x01) != 0) {
resetSemaphoreAndClearObjects(instance,plateSemaphore, licensePlateRecognitions, licensePlateLen);
initialize(newHumanDetectionLen, newLicensePlateLen, newFaceLen, false, 0x01);
initLicensePlateManage(odelPaths, licensePlateCarConfidence);
}
}
void* AlgorithmTaskManage::schedulingAlgorithm(int scheType) {
if (scheType == 0x01) {
return schedulingAlgorithmTemplate(humanDetections, mtxHuman);
...@@ -73,9 +147,12 @@ void* AlgorithmTaskManage::schedulingAlgorithm(int scheType) {
}
void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces,QString sSn){
if (isShuttingDown.load(std::memory_order_acquire)) {
return ;
}
faceSemaphore->acquire();
ScopeSemaphoreExit guard([this]() {
faceSemaphore->release(); // release the semaphore
});
auto* selectedFaceReconition = static_cast<FaceReconitionHandle*>(schedulingAlgorithm(0x03));
if (selectedFaceReconition!=nullptr && !selectedFaceReconition->getImageChanged()) {
...@@ -84,7 +161,7 @@ void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,s
qInfo() << "人脸识别算法抢到===>sn"<<sSn<<selectedFaceReconition;
selectedFaceReconition->doesItExistEmployee(source, faces);
} else {
qInfo() << "没有可用的selectedFaceReconition对象可以调度";
return ;
}
}
...@@ -92,9 +169,12 @@ void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,s
void AlgorithmTaskManage::executeFindlicensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime,QString sSn){
if (isShuttingDown.load(std::memory_order_acquire)) {
return ;
}
plateSemaphore->acquire();
ScopeSemaphoreExit guard([this]() {
plateSemaphore->release(); // release the semaphore
});
auto* selectedLicensePlate = static_cast<LicensePlateRecognition*>(schedulingAlgorithm(0x02));
if (selectedLicensePlate!=nullptr) {
...@@ -103,24 +183,27 @@ void AlgorithmTaskManage::executeFindlicensePlateNumber(const cv::Mat &source, Q
qInfo() << "车牌调度算法抢到===>sn"<<sSn<<selectedLicensePlate;
selectedLicensePlate->licensePlateNumber(source, lpNumber,plate, currentTime);
} else {
qInfo() << "没有可用的selectedLicensePlate对象可以调度";
return ;
}
}
int AlgorithmTaskManage::executeFindHuManCar(const cv::Mat &source, int res,
std::vector<vides_data::ParkingArea> &currentPlate,std::map<int,int>&resMap,QString sSn,float & heightReference) {
if (isShuttingDown.load(std::memory_order_acquire)) {
return -2;
}
semaphore->acquire();
ScopeSemaphoreExit guard([this]() {
semaphore->release(); // release the semaphore
});
auto* selectedHumanDetection = static_cast<HumanDetection*>(schedulingAlgorithm(0x01));
if (selectedHumanDetection != nullptr ) {
selectedHumanDetection->setIsRunning(true);
// call findHuManCar on the selected object
qInfo() << "人形调度算法抢到===>sn"<<sSn<<"res"<<selectedHumanDetection;
int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, heightReference,currentPlate);
return detectionResult;
} else {
qInfo() << "没有可用的HumanDetection对象可以调度";
......
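// Note (illustrative sketch, not part of this commit): releaseResources() above treats algorithmPermissions
// as a bitmask (bit 0 = license plate, bit 1 = face, bit 2 = uniform/work clothes), while initialize()
// receives a single selector value (0x00 human detection, 0x01 plate, 0x02 face). A minimal,
// self-contained sketch of the bitmask convention as read from the code above:
#include <cstdint>
#include <cstdio>

constexpr std::uint8_t PLATE_BIT   = 0x01;      // (algorithmPermissions & 0x01)
constexpr std::uint8_t FACE_BIT    = 0x01 << 1; // (algorithmPermissions & 0x01 << 1)
constexpr std::uint8_t UNIFORM_BIT = 0x01 << 2; // (algorithmPermissions & 0x01 << 2)

int main() {
    std::uint8_t algorithmPermissions = PLATE_BIT | UNIFORM_BIT; // e.g. plate + uniform enabled
    if (algorithmPermissions & UNIFORM_BIT) std::printf("update uniform (human detection) pool\n");
    if (algorithmPermissions & FACE_BIT)    std::printf("update face recognition pool\n");
    if (algorithmPermissions & PLATE_BIT)   std::printf("update license plate pool\n");
    return 0;
}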
...@@ -14,15 +14,18 @@
#include <mutex>
class AlgorithmTaskManage{
public:
AlgorithmTaskManage();
~AlgorithmTaskManage();
static AlgorithmTaskManage& getInstance()
{
static AlgorithmTaskManage instance;
return instance;
}
void initialize(int humanDetectionLen, int licensePlateLen, int faceLen,bool first,__uint8_t algorithmPermissions );
void initHumanDetectionManage(const QString &modelPaths,
float carShapeConfidence,int &uniformColor);
void initLicensePlateManage(const QString &modelPaths,
float carConfidence);
...@@ -33,9 +36,19 @@ public:
void *schedulingAlgorithm(int scheType);
void releaseResources(int newHumanDetectionLen, int newLicensePlateLen, int newFaceLen, const QString &odelPaths,
float humanCarShapeConfidence,
int uniformColor,
float licensePlateCarConfidence,
std::map<QString,QString>& faceMaps,
int numberFaces,
float faceConfidence,
__uint8_t algorithmPermissions);
int executeFindHuManCar(const cv::Mat &source,int res,std::vector<vides_data::ParkingArea> &currentPlate,
std::map<int,int>&resMap, QString sSn,float & heightReference);
void executeFindlicensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime,QString sSn);
...@@ -50,7 +63,6 @@ private:
qint64 currentTime = QDateTime::currentMSecsSinceEpoch();
qint64 maxWaitTime = 0;
std::vector<T*> schedulableObjects;
for (T* obj : objects) {
if (obj->getIsRunning()) continue;
qint64 waitTime = std::abs(currentTime - obj->getThreadTime());
...@@ -65,16 +77,36 @@ private:
if (schedulableObjects.empty()) {
return nullptr;
}
if (schedulableObjects.size() == 1) {
T* selected = schedulableObjects.at(0);
selected->setIsRunning(true); // mark as running immediately
return selected;
}
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
T* selected = schedulableObjects[dis(gen)];
selected->setIsRunning(true); // mark as running immediately
return selected;
}
template<typename T>
void resetSemaphoreAndClearObjects(Common& instance, QSemaphore*& semaphore, std::vector<T*>& containers, int len) {
if (semaphore != nullptr) {
semaphore->acquire(len);
for (auto obj : containers) {
do {
if (!obj->getIsRunning()) {
instance.deleteObj(obj);
break;
}
} while (obj->getIsRunning());
}
containers.clear();
semaphore->release(len);
instance.deleteObj(semaphore);
}
}
static AlgorithmTaskManage* instance;
std::vector<HumanDetection*>humanDetections;
...@@ -89,11 +121,11 @@ private:
int faceLen;
QSemaphore *semaphore;
QSemaphore *plateSemaphore;
QSemaphore *faceSemaphore;
std::mutex mtxHuman;
...@@ -101,7 +133,7 @@ private:
std::mutex mtxFace;
std::atomic<bool> isShuttingDown;
};
......
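// Note (illustrative sketch, not part of this commit): resetSemaphoreAndClearObjects() above drains a
// worker pool before rebuilding it. Acquiring all `len` permits blocks until every in-flight execute*()
// call has released its permit, after which the handles can be deleted safely and the semaphore
// recreated via initialize(). A simplified sketch of that pattern, with a hypothetical Worker type
// standing in for HumanDetection/FaceReconitionHandle/LicensePlateRecognition:
#include <QSemaphore>
#include <vector>

struct Worker { /* stands in for an algorithm handle */ };

void drainAndRebuild(QSemaphore*& sem, std::vector<Worker*>& pool, int len) {
    if (sem == nullptr) return;
    sem->acquire(len);                // wait until all permits are free: no task is mid-flight
    for (Worker* w : pool) delete w;  // the real code uses Common::deleteObj and re-checks getIsRunning()
    pool.clear();
    sem->release(len);
    delete sem;                       // the old semaphore goes away...
    sem = new QSemaphore(len);        // ...and a fresh one is created (initialize(..., false, ...) in the real code)
}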
#include "BaseAlgorithm.h"
BaseAlgorithm::BaseAlgorithm():thread_time(0){
}
BaseAlgorithm::~BaseAlgorithm(){
}
qint64 BaseAlgorithm::getThreadTime()const{
return thread_time.load(std::memory_order_acquire);
}
bool BaseAlgorithm::getIsRunning()const{
return isRunning.load(std::memory_order_acquire);
}
void BaseAlgorithm::setIsRunning(bool running) {
this->isRunning.store(running, std::memory_order_release);
}
#ifndef BASEALGORITHM_H
#define BASEALGORITHM_H
#include <QDateTime>
#include <atomic>
class BaseAlgorithm {
protected:
std::atomic<qint64>thread_time;
std::atomic<bool> isRunning{false};
public:
BaseAlgorithm();
virtual ~BaseAlgorithm();
qint64 getThreadTime()const;
bool getIsRunning()const;
void setIsRunning(bool running);
};
#endif // BASEALGORITHM_H
...@@ -40,7 +40,8 @@ enum CAR_INFORMATION {
class CameraHandle: public QObject {
Q_OBJECT
public:
CameraHandle(QString &url,QString &httpUrl,QString &sSn, int &channel, int imageSave,
float &heightReference,vides_data::responseConfig &devConfig);
CameraHandle();
~CameraHandle();
int sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
...@@ -55,6 +56,9 @@ public:
void clearCameraHandle();
// void rebindTimer(int hDevice);
// camera parameter update
void cameraParameterUpdate(vides_data::responseConfig &cloudConfig);
void initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 face_frequency);
void notificationUpdateImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence);
...@@ -79,6 +83,8 @@ public:
void printWifi(XSDK_HANDLE hDevice,XSDK_CFG::NetWork_Wifi &cfg);
void setCarConfidenceMaxAndMin(float carConfidenceMax,float carConfidenceMin);
// set the Wi-Fi the camera connects to
void sdkWifi(QString &pwd,QString &ssid);
// time settings
...@@ -89,8 +95,10 @@ public:
void sdkEncodeCfg(const char *enCode);
// GB28181 update
void sdkDevSpvMn(const char* spvMn);
void updateSdkDevSpvMn(vides_data::responseGb28181 *gb28181);
// reboot the device
void deviceReboot(bool isCloseHandle );
// get firmware version
void findFirmwareVersion(QString &firmwareVersion);
...@@ -107,6 +115,7 @@ public:
bool isClockwise(const std::vector<cv::Point2f>& polygon);
QString getSSn();
int getMediaHandle();
void setMediaHandle(int mediaHandle);
...@@ -122,6 +131,8 @@ public:
std::map<QString, QString>&getCurrentData();
vides_data::responseConfig &getDeviceConfig();
bool isChanged(const QPoint& newInfo, const QPoint& current);
// check whether a point lies inside the polygon
bool polygonsOverlap(ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2);
...@@ -182,6 +193,14 @@ private :
__uint8_t algorithmPermissions;
vides_data::responseConfig devConfig;
float heightReference;
float carConfidenceMax;
float carConfidenceMin;
};
#endif // CAMERAHANDLE_H
...@@ -62,35 +62,6 @@ void Common::setImages(QString images){
images.append("/");
this->images=images;
}
float Common::getCarConfidenceMax() const{
return carConfidenceMax;
}
void Common::setCarConfidenceMax(float carConfidenceMax){
this->carConfidenceMax=carConfidenceMax;
}
float Common::getCarConfidenceMin() const{
return carConfidenceMin;
}
void Common::setCarConfidenceMin(float carConfidenceMin){
this->carConfidenceMin=carConfidenceMin;
}
int Common::getHumanDetectionLen() const{
return humanDetectionLen;
}
int Common::getLicensePlateLen() const{
return licensePlateLen;
}
int Common::getFaceLen() const{
return faceLen;
}
void Common::setTaskManage(int humanDetectionLen,int licensePlateLen,int faceLen){
this->humanDetectionLen=humanDetectionLen;
this->licensePlateLen=licensePlateLen;
this->faceLen=faceLen;
}
QString Common::GetLocalIp() {
QString ipAddress;
QList<QHostAddress> list = QNetworkInterface::allAddresses();
......
...@@ -44,17 +44,6 @@ public:
QString GetLocalIp();
float getCarConfidenceMax() const;
void setCarConfidenceMax(float carConfidenceMax);
float getCarConfidenceMin() const;
void setCarConfidenceMin(float carConfidenceMin);
int getHumanDetectionLen() const;
int getLicensePlateLen() const;
int getFaceLen() const;
void setTaskManage(int humanDetectionLen,int licensePlateLen,int faceLen);
template <typename T>
const T& clamp(const T& v, const T& lo, const T& hi)
{
...@@ -72,11 +61,6 @@ private:
QString videoOut;
QString videoDownload;
QString images;
float carConfidenceMax;
float carConfidenceMin;
int humanDetectionLen;
int licensePlateLen;
int faceLen;
Common();
~Common();
......
...@@ -15,23 +15,15 @@ FaceReconitionHandle::FaceReconitionHandle() {
}
FaceReconitionHandle::~FaceReconitionHandle(){
static int i=0;
if (ctxHandle != nullptr) {
HF_ReleaseFaceContext(ctxHandle);
qInfo()<<"人脸析构"<<++i;
ctxHandle = nullptr;
}
}
qint64 FaceReconitionHandle::getThreadTime() const{
return thread_time.load(std::memory_order_acquire);
}
bool FaceReconitionHandle::getIsRunning() const{
return isRunning.load(std::memory_order_acquire);
}
void FaceReconitionHandle::setIsRunning(bool running){
this->isRunning.store(running, std::memory_order_release);
}
bool FaceReconitionHandle::getImageChanged()const{
return isImageChanged.load(std::memory_order_acquire);
}
......
#ifndef FACERECONITIONHANDLE_H
#define FACERECONITIONHANDLE_H
#include "BaseAlgorithm.h"
#include "hyperface.h" #include "hyperface.h"
#include "herror.h" #include "herror.h"
...@@ -10,8 +11,8 @@ ...@@ -10,8 +11,8 @@
#include<QCoreApplication> #include<QCoreApplication>
class FaceReconitionHandle class FaceReconitionHandle : public BaseAlgorithm {
{
private: private:
HContextHandle ctxHandle=nullptr; HContextHandle ctxHandle=nullptr;
...@@ -19,10 +20,6 @@ private: ...@@ -19,10 +20,6 @@ private:
std::vector<int32_t>customIds; std::vector<int32_t>customIds;
std::atomic<qint64> thread_time;
std::atomic<bool> isRunning{false};
std::atomic<bool>isImageChanged{false};
public:
FaceReconitionHandle();
...@@ -31,12 +28,6 @@ public:
cv::Mat loadImage(const QString &path);
cv::Mat loadImageFromByteStream(const QString& filePath);
qint64 getThreadTime() const;
bool getIsRunning() const;
void setIsRunning(bool running);
bool getImageChanged()const;
void setImageChanged(bool imageChanged);
......
...@@ -453,6 +453,116 @@ vides_data::response*HttpService::httpFindGb28181Config(QString &serialNumber){
}
return resp;
}
vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,vides_data::responseConfig &config){
httpUrl.append("/api/v1.0/device/config");
vides_data::response *resp=new vides_data::response();
QUrlQuery query;
query.addQueryItem("sn",serialNumber);
QNetworkRequest request;
QUrl url(httpUrl);
url.setQuery(query);
request.setUrl(url);
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
if(m_httpClient.get(request)){
QByteArray && byte=m_httpClient.text().toUtf8();
QJsonDocument docujson= QJsonDocument::fromJson(byte.data());
QJsonObject maps= docujson.object();
QVariantMap map =std::move(maps.toVariantMap());
resp->code=map["code"].toInt();
QJsonObject dataObj = maps["data"].toObject();
QJsonObject mainFormatObj = dataObj["MainFormat"].toObject();
config.mainFormat.AudioEnable = mainFormatObj["AudioEnable"].toBool();
config.mainFormat.BitRate = mainFormatObj["BitRate"].toInt();
config.mainFormat.BitRateControl = mainFormatObj["BitRateControl"].toString();
config.mainFormat.Compression = mainFormatObj["Compression"].toString();
config.mainFormat.FPS = mainFormatObj["FPS"].toInt();
config.mainFormat.GOP = mainFormatObj["GOP"].toInt();
config.mainFormat.Quality = mainFormatObj["Quality"].toInt();
config.mainFormat.Resolution = mainFormatObj["Resolution"].toString();
config.mainFormat.VirtualGOP = mainFormatObj["VirtualGOP"].toInt();
config.mainFormat.VideoEnable = mainFormatObj["VideoEnable"].toBool();
config.mainFormat.updateAt = mainFormatObj["updateAt"].toVariant().toULongLong();
// parse extraFormat
QJsonObject extraFormatObj = dataObj["ExtraFormat"].toObject();
config.extraFormat.AudioEnable = extraFormatObj["AudioEnable"].toBool();
config.extraFormat.BitRate = extraFormatObj["BitRate"].toInt();
config.extraFormat.BitRateControl = extraFormatObj["BitRateControl"].toString();
config.extraFormat.Compression = extraFormatObj["Compression"].toString();
config.extraFormat.FPS = extraFormatObj["FPS"].toInt();
config.extraFormat.GOP = extraFormatObj["GOP"].toInt();
config.extraFormat.Quality = extraFormatObj["Quality"].toInt();
config.extraFormat.Resolution = extraFormatObj["Resolution"].toString();
config.extraFormat.VirtualGOP = extraFormatObj["VirtualGOP"].toInt();
config.extraFormat.VideoEnable = extraFormatObj["VideoEnable"].toBool();
config.extraFormat.updateAt = extraFormatObj["updateAt"].toVariant().toULongLong();
// parse timerSettings
QJsonObject timerSettingsObj = dataObj["timerSettings"].toObject();
config.timerSettings.deleteLogFileTimer = timerSettingsObj["deleteLogFileTimer"].toInt();
config.timerSettings.devicePermissionSynTimer = timerSettingsObj["devicePermissionSynTimer"].toInt();
config.timerSettings.updateAt = timerSettingsObj["updateAt"].toVariant().toULongLong();
// parse faceConfig
QJsonObject faceConfigObj = dataObj["faceConfig"].toObject();
config.faceConfig.isOn = faceConfigObj["isOn"].toBool();
config.faceConfig.faceNumbers = faceConfigObj["faceNumbers"].toInt();
config.faceConfig.faceFrequency = faceConfigObj["faceFrequency"].toInt();
config.faceConfig.confidence = faceConfigObj["confidence"].toVariant().toFloat();
config.faceConfig.updateAt = faceConfigObj["updateAt"].toVariant().toULongLong();
config.faceConfig.faceLen=faceConfigObj["faceLen"].toInt();
// parse licensePlateConfig
QJsonObject licensePlateConfigObj = dataObj["licensePlateConfig"].toObject();
config.licensePlateConfig.isOn = licensePlateConfigObj["isOn"].toBool();
config.licensePlateConfig.carConfidence = licensePlateConfigObj["carConfidence"].toVariant().toFloat();
config.licensePlateConfig.carConfidenceMax = licensePlateConfigObj["carConfidenceMax"].toVariant().toFloat();
config.licensePlateConfig.carConfidenceMin = licensePlateConfigObj["carConfidenceMin"].toVariant().toFloat();
config.licensePlateConfig.licensePlateLen=licensePlateConfigObj["licensePlateLen"].toInt();
config.licensePlateConfig.updateAt = licensePlateConfigObj["updateAt"].toVariant().toULongLong();
// parse uniformConfig
QJsonObject uniformConfigObj = dataObj["uniformConfig"].toObject();
config.uniformConfig.isOn = uniformConfigObj["isOn"].toBool();
config.uniformConfig.uniformColor = uniformConfigObj["uniformColor"].toInt();
config.uniformConfig.humanDetectionLen = uniformConfigObj["humanDetectionLen"].toInt();
config.uniformConfig.updateAt = uniformConfigObj["updateAt"].toVariant().toULongLong();
config.uniformConfig.carShapeConfidence = uniformConfigObj["carShapeConfidence"].toVariant().toFloat();
// parse devicesConfig (camera)
QJsonObject devicesConfigObj = dataObj["camera"].toObject();
config.camera.password = devicesConfigObj["password"].toString();
config.camera.username = devicesConfigObj["username"].toString();
config.camera.updateAt = devicesConfigObj["updateAt"].toVariant().toULongLong();
config.camera.devSnapSynTimer = devicesConfigObj["devSnapSynTimer"].toInt();
config.camera.imageSave = devicesConfigObj["imageSave"].toInt();
config.camera.heightReference = devicesConfigObj["heightReference"].toVariant().toFloat();
// parse mqttConfig
QJsonObject mqttConfigObj = dataObj["mqttConfig"].toObject();
config.mqttConfig.address=mqttConfigObj["address"].toString();
config.mqttConfig.clientId=mqttConfigObj["clientId"].toString();
config.mqttConfig.qos=mqttConfigObj["qos"].toInt();
config.mqttConfig.timeout = mqttConfigObj["timeout"].toVariant().toULongLong();
config.mqttConfig.topic=mqttConfigObj["topic"].toString();
config.mqttConfig.username=mqttConfigObj["username"].toString();
config.mqttConfig.password=mqttConfigObj["password"].toString();
resp->msg=map["message"].toString();
}else{
qDebug()<<m_httpClient.errorCode();
resp->code=2;
resp->msg=m_httpClient.errorString();
}
return resp;
}
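// Note (illustrative sketch, not part of this commit): a typical call site for the new endpoint.
// The HttpService instance, the success code and the error handling below are assumptions.
#include "HttpService.h"   // assumed header name
#include <QDebug>

void loadDeviceConfig(HttpService &service, const QString &sn) {
    vides_data::responseConfig cfg;
    vides_data::response *resp = service.httpDeviceConfig(sn, cfg);
    if (resp->code == 0) {             // assumed success code; check against the server contract
        qInfo() << "mqtt address:" << cfg.mqttConfig.address << "topic:" << cfg.mqttConfig.topic;
    } else {
        qInfo() << "httpDeviceConfig failed:" << resp->msg;
    }
    delete resp;                       // the response is heap-allocated by httpDeviceConfig
}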
vides_data::response*HttpService::httpFindStream(QString &serialNumber){
......
...@@ -50,6 +50,9 @@ public:
QString & bucketName,QString &securityToken);
void setHttpUrl(const QString & httpUrl);
vides_data::response *httpDeviceConfig(const QString &serialNumber,vides_data::responseConfig &config);
static vides_data::responseStsCredentials stsCredentials;
~HttpService();
......
...@@ -4,7 +4,7 @@
#include <QDateTime>
HumanDetection::HumanDetection(const QString &modelPaths,
float carShapeConfidence) : heightReference(250.0f){
QByteArray && by_mpath=modelPaths.toUtf8();
char* m_path=by_mpath.data();
detector = TCV_CreateHumanDetector(m_path,1);
...@@ -14,8 +14,11 @@ HumanDetection::HumanDetection(const QString &modelPaths,
}
HumanDetection::~HumanDetection(){
static int i=0;
if(detector!=nullptr){
TCV_ReleaseHumanDetector(detector);
qInfo()<<"工服析构"<<++i;
detector=nullptr;
}
}
...@@ -70,35 +73,21 @@ void HumanDetection::draw_human_on_image(const cv::Mat& image, const TCV_ObjectL
}
}
void HumanDetection::setHuManParameter(int &uniformColor){
this->heightReference=height_reference;
this->uniformColor=uniformColor;
}
qint64 HumanDetection::getThreadTime()const{
return thread_time.load(std::memory_order_acquire);
}
bool HumanDetection::getIsRunning()const{
return isRunning.load(std::memory_order_acquire);
}
void HumanDetection::setIsRunning(bool running) {
this->isRunning.store(running, std::memory_order_release);
}
// 0: person, 1: vehicle, 2: uniform
int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int>&reMap, float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate) {
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
TCV_CameraStream *stream = TCV_CreateCameraStream();
ScopeSemaphoreExit streamGuard([this, stream]() {
// release the camera stream
TCV_ReleaseCameraStream(stream);
isRunning.store(false, std::memory_order_release);
});
TCV_CameraStreamSetData(stream, source.data, source.cols, source.rows);
...@@ -145,6 +134,8 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
}
reMap[0x02] = count_no_uniform; // number of pedestrians without uniforms
reMap[0x00] = count_all; // number of all qualifying pedestrians
qInfo()<<"count_all==>"<<count_all;
qInfo()<<"count_no_uniform==>"<<count_no_uniform;
num = (res == 0x00) ? count_all : count_no_uniform;
}
...@@ -166,11 +157,10 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
area.bottomRightCornerX=car.x2;
area.bottomRightCornerY=car.y2;
currentPlate.push_back(area);
qDebug() << "score 检测到的汽车数量匹配度:" << car.score;
}
qInfo() << "findHuManCar 检测到的汽车数量:" << num;
} else {
qInfo() << "参数错误";
}
return num;
}
...@@ -3,35 +3,26 @@
#include "VidesData.h"
#include "so_human_sdk.h"
#include "ScopeSemaphoreExit.h"
#include "BaseAlgorithm.h"
#include <signal.h>
#include <QDateTime>
#include <opencv2/opencv.hpp>
#include <QDebug>
#include <atomic>
#include<QThread>
class HumanDetection:public BaseAlgorithm {
Q_OBJECT
public:
HumanDetection(const QString &modelPaths,
float carShapeConfidence);
~HumanDetection();
int findHuManCar(const cv::Mat &source,int res,std::map<int,int>&reMap,
float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate);
void setHuManParameter(int &uniformColor);
void draw_human_on_image(const cv::Mat& image, const TCV_ObjectLocation* boxes, int size);
qint64 getThreadTime() const;
bool getIsRunning() const;
void setIsRunning(bool running);
private:
// height reference
float heightReference;
...@@ -40,11 +31,6 @@ private:
TCV_HumanDetector *detector;
std::atomic<qint64> thread_time;
std::atomic<bool> isRunning{false};
};
#endif // HUMANDETECTION_H
...@@ -24,7 +24,10 @@ LicensePlateRecognition::LicensePlateRecognition(){
}
LicensePlateRecognition::~LicensePlateRecognition(){
static int i=0;
HLPR_ReleaseContext(ctx);
qInfo()<<"车牌析构"<<++i;
}
void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const QString &modelPaths,QString & lpNumber){
...@@ -112,18 +115,6 @@ void LicensePlateRecognition::filterLicensePlateConfidenceMax(vides_data::reques
}
}
}
qint64 LicensePlateRecognition::getThreadTime()const{
return thread_time.load(std::memory_order_acquire);
}
bool LicensePlateRecognition::getIsRunning()const{
return isRunning.load(std::memory_order_acquire);
}
void LicensePlateRecognition::setIsRunning(bool running) {
this->isRunning.store(running, std::memory_order_release);
}
void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime) {
......
...@@ -3,6 +3,7 @@
#include "hyper_lpr_sdk.h"
#include "LogHandle.h"
#include "VidesData.h"
#include "BaseAlgorithm.h"
#include <QString>
#include <opencv2/opencv.hpp>
#include <QTextStream>
...@@ -14,7 +15,8 @@
const std::vector<std::string> types =
{"蓝牌", "黄牌单层", "白牌单层", "绿牌新能源", "黑牌港澳",
"香港单层", "香港双层", "澳门单层", "澳门双层", "黄牌双层"};
class LicensePlateRecognition: public BaseAlgorithm {
public:
// recognize the license plate number
...@@ -33,22 +35,10 @@ public:
~LicensePlateRecognition();
qint64 getThreadTime() const;
bool getIsRunning() const;
void setIsRunning(bool running);
private:
P_HLPR_Context ctx=nullptr ;
std::atomic<qint64> thread_time;
std::atomic<bool> isRunning{false};
};
......
#include "MqttSubscriber.h"
MqttSubscriber* MqttSubscriber::instance = nullptr;
MqttSubscriber* MqttSubscriber::getInstance(vides_data::MqttConfig& config, QObject* parent) {
if (!instance) {
instance = new MqttSubscriber(config, parent);
}
return instance;
}
MqttSubscriber::MqttSubscriber(vides_data::MqttConfig& config, QObject* parent)
: QObject(parent), config(config) {
QByteArray bAddress = config.address.toUtf8();
char* cAddress=bAddress.data();
QByteArray bClientId = config.clientId.toUtf8();
char* cClientId=bClientId.data();
MQTTAsync_create(&client,cAddress,cClientId, MQTTCLIENT_PERSISTENCE_NONE, nullptr);
MQTTAsync_setCallbacks(client, this, [](void* context, char* cause) {
static_cast<MqttSubscriber*>(context)->connectionLost(cause);
}, [](void* context, char* topicName, int topicLen, MQTTAsync_message* m) {
return static_cast<MqttSubscriber*>(context)->messageArrived(topicName, topicLen, m);
}, nullptr);
}
MqttSubscriber::~MqttSubscriber() {
MQTTAsync_destroy(&client);
instance = nullptr;
}
void MqttSubscriber::start() {
MQTTAsync_connectOptions conn_opts = MQTTAsync_connectOptions_initializer;
conn_opts.keepAliveInterval = 20;
conn_opts.cleansession = 1;
QByteArray bUsername = config.username.toUtf8();
char* cUsername=bUsername.data();
QByteArray bPassword = config.password.toUtf8();
char* cPassword=bPassword.data();
conn_opts.username = cUsername;
conn_opts.password = cPassword;
conn_opts.onSuccess = [](void* context, MQTTAsync_successData* response) {
static_cast<MqttSubscriber*>(context)->onConnect(response);
};
conn_opts.onFailure = [](void* context, MQTTAsync_failureData* response) {
static_cast<MqttSubscriber*>(context)->onConnectFailure(response);
};
conn_opts.context = this;
int rc;
if ((rc = MQTTAsync_connect(client, &conn_opts)) != MQTTASYNC_SUCCESS) {
qInfo() << "启动连接失败,返回编码" << rc;
}
}
void MqttSubscriber::onConnect(MQTTAsync_successData* response) {
MQTTAsync_responseOptions opts = MQTTAsync_responseOptions_initializer;
opts.onSuccess = [](void* context, MQTTAsync_successData* response) {
static_cast<MqttSubscriber*>(context)->onSubscribe(response);
};
opts.onFailure = [](void* context, MQTTAsync_failureData* response) {
static_cast<MqttSubscriber*>(context)->onSubscribeFailure(response);
};
opts.context = this;
QByteArray bTopic = config.topic.toUtf8();
char* cTopic=bTopic.data();
int rc;
if ((rc = MQTTAsync_subscribe(client, cTopic, config.qos, &opts)) != MQTTASYNC_SUCCESS) {
qInfo() << "启动订阅失败,返回编码" << rc<<response->token;
}
}
void MqttSubscriber::onConnectFailure(MQTTAsync_failureData* response) {
qInfo() << "连接失败, rc" << (response ? response->code : -1);
}
void MqttSubscriber::onSubscribe(MQTTAsync_successData* response) {
qInfo() << "订阅成功"<<response->token;
}
void MqttSubscriber::onSubscribeFailure(MQTTAsync_failureData* response) {
qInfo() << "订阅失败, rc" << (response ? response->code : -1);
}
void MqttSubscriber::connectionLost(char* cause) {
qInfo() << "连接丢失";
if (cause) {
qInfo() << "Cause:" << cause;
}
}
int MqttSubscriber::messageArrived(char* topicName, int topicLen, MQTTAsync_message* m) {
QString topic(topicName);
QString payload = QString::fromUtf8(reinterpret_cast<const char*>(m->payload), m->payloadlen);
qInfo() << "Message arrived";
qInfo() << "Topic:" << topic;
qInfo() << "Payload:" << payload;
qInfo()<<"topicLen"<<topicLen;
MQTTAsync_freeMessage(&m);
MQTTAsync_free(topicName);
return 1;
}
void MqttSubscriber::sendSubscriptionConfirmation(const std::string& messageId) {
std::string confirmationTopic = "confirmation/subscription";
std::string confirmationMessage = "Subscription confirmed with message ID: " + messageId;
MQTTAsync_message pubmsg = MQTTAsync_message_initializer;
pubmsg.payload = const_cast<char*>(confirmationMessage.c_str());
pubmsg.payloadlen = confirmationMessage.length();
pubmsg.qos = config.qos;
pubmsg.retained = 0;
MQTTAsync_responseOptions opts = MQTTAsync_responseOptions_initializer;
opts.onSuccess = [](void* context, MQTTAsync_successData* response) {
static_cast<MqttSubscriber*>(context)->onPublishSuccess(response);
};
opts.onFailure = [](void* context, MQTTAsync_failureData* response) {
static_cast<MqttSubscriber*>(context)->onPublishFailure(response);
};
opts.context = this;
int rc;
if ((rc = MQTTAsync_sendMessage(client, confirmationTopic.c_str(), &pubmsg, &opts)) != MQTTASYNC_SUCCESS) {
qInfo() << "发送消息失败,返回编码" << rc;
}
}
void MqttSubscriber::onPublishSuccess(MQTTAsync_successData* response) {
qInfo() << "消息已成功发布"<<response->token;
}
void MqttSubscriber::onPublishFailure(MQTTAsync_failureData* response) {
qInfo() << "消息发布失败, rc" << (response ? response->code : -1);
}
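// Note (illustrative sketch, not part of this commit): one way the parsed mqttConfig could be wired
// into the subscriber at startup; the actual MainWindow wiring is not shown in this diff.
#include "MqttSubscriber.h"
#include "VidesData.h"

void startMqtt(vides_data::responseConfig &cfg, QObject *parent) {
    vides_data::MqttConfig mqtt = cfg.mqttConfig;                // address, clientId, qos, topic, credentials
    MqttSubscriber *subscriber = MqttSubscriber::getInstance(mqtt, parent);
    subscriber->start();                                         // async connect; onConnect() performs the subscribe
}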
#ifndef MQTTSUBSCRIBER_H
#define MQTTSUBSCRIBER_H
#include <MQTTClient.h>
#include <MQTTAsync.h>
#include <QObject>
#include "VidesData.h"
class MqttSubscriber : public QObject
{
Q_OBJECT
public:
static MqttSubscriber* getInstance(vides_data::MqttConfig& config, QObject* parent = nullptr);
~MqttSubscriber();
void start();
private:
MqttSubscriber(vides_data:: MqttConfig& config, QObject* parent = nullptr);
MqttSubscriber(const MqttSubscriber&) = delete;
MqttSubscriber& operator=(const MqttSubscriber&) = delete;
MQTTAsync client;
vides_data::MqttConfig config;
void onConnect(MQTTAsync_successData* response);
void onConnectFailure(MQTTAsync_failureData* response);
void onSubscribe(MQTTAsync_successData* response);
void onSubscribeFailure(MQTTAsync_failureData* response);
void connectionLost(char* cause);
int messageArrived(char* topicName, int topicLen, MQTTAsync_message* m);
void onPublishSuccess(MQTTAsync_successData* response);
void onPublishFailure(MQTTAsync_failureData* response);
void sendSubscriptionConfirmation(const std::string& messageId);
static MqttSubscriber* instance;
};
#endif // MQTTSUBSCRIBER_H
...@@ -204,6 +204,106 @@ struct responseRecognitionData
int recognitionType;
QString sn;
};
struct MainFormat {
bool AudioEnable;
int BitRate;
QString BitRateControl;
QString Compression;
int FPS;
int GOP;
int Quality;
QString Resolution;
int VirtualGOP;
bool VideoEnable;
quint64 updateAt;
};
struct ExtraFormat {
bool AudioEnable;
int BitRate;
QString BitRateControl;
QString Compression;
int FPS;
int GOP;
int Quality;
QString Resolution;
int VirtualGOP;
bool VideoEnable;
quint64 updateAt;
};
struct TimerSettings {
int deleteLogFileTimer;
int devicePermissionSynTimer;
quint64 updateAt;
};
struct Camera {
int devSnapSynTimer;
int imageSave;
QString password;
QString username;
float heightReference;
quint64 updateAt;
};
struct FaceConfig {
bool isOn;
int faceNumbers;
uint64 faceFrequency;
float confidence;
int faceLen;
quint64 updateAt;
};
struct LicensePlateConfig {
bool isOn;
float carConfidence;
float carConfidenceMax;
float carConfidenceMin;
int licensePlateLen;
quint64 updateAt;
};
struct UniformConfig {
bool isOn;
int uniformColor;
int humanDetectionLen;
float carShapeConfidence;
quint64 updateAt;
};
struct MqttConfig {
QString address;
QString clientId;
int qos;
quint64 timeout;
QString topic;
QString username;
QString password;
};
struct responseConfig {
MainFormat mainFormat;
ExtraFormat extraFormat;
TimerSettings timerSettings;
FaceConfig faceConfig;
LicensePlateConfig licensePlateConfig;
UniformConfig uniformConfig;
Camera camera;
MqttConfig mqttConfig;
};
struct Devices{
QString id;
QString state;
};
struct responseMqttData{
uint8_t msg_type;
std::list<Devices>devices;
};
inline bool isVirtualMachine()
{
QString dmiPath;
...@@ -270,13 +370,24 @@ inline bool pingAddress(const QString &address) {
QString program = "ping";
QStringList arguments;
#ifdef Q_OS_WIN
arguments << "-n" << "1" << address;
#else
arguments << "-c" << "1" << address;
#endif
process.start(program, arguments);
if (!process.waitForStarted()) {
return false;
}
if (!process.waitForFinished(1000)) {
return false;
}
QString output(process.readAllStandardOutput());
// more complex logic could be used here to parse the ping output
return output.contains("1 packets transmitted, 1 received");
}
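// Note (illustrative sketch, not part of this commit): the Windows branch above sends "-n 1", but the
// success check still looks for the Linux iputils summary line; Windows ping (English locale) prints
// something like "Packets: Sent = 1, Received = 1" instead. A platform-aware check could look like
// this; the exact strings are locale-dependent and should be verified on the target systems.
inline bool pingOutputIndicatesSuccess(const QString &output)
{
#ifdef Q_OS_WIN
    return output.contains("Received = 1");                      // assumed English-locale Windows summary
#else
    return output.contains("1 packets transmitted, 1 received"); // Linux iputils summary
#endif
}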
inline int GetCpuIdByAsm_arm(char* cpu_id) inline int GetCpuIdByAsm_arm(char* cpu_id)
......
...@@ -12,7 +12,7 @@ TEMPLATE = app
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
DEFINES += APP_VERSION=\\\"1.3.0\\\"
QMAKE_LIBDIR += /usr/local/lib
...@@ -23,6 +23,7 @@ INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK
INCLUDEPATH+=/usr/local/include/human
INCLUDEPATH+=/usr/local/include/CImg
INCLUDEPATH+=/usr/local/include/mqtt
#unix:contains(QMAKE_HOST.arch, x86_64) {
# QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
...@@ -76,6 +77,7 @@ LIBS += -lopencv_core \
-lhyperlpr3 \
-lopencv_objdetect \
-lsohuman \
-lpaho-mqtt3a \
# -lssl \
# -lcrypto \ sudo apt-get install libjpeg-dev libpng-dev
-lc \
...@@ -97,7 +99,9 @@ SOURCES += \
HumanDetection.cpp \
ScopeSemaphoreExit.cpp \
FaceReconitionHandle.cpp \
AlgorithmTaskManage.cpp \
BaseAlgorithm.cpp \
MqttSubscriber.cpp
HEADERS += \
Common.h \
...@@ -115,7 +119,9 @@ HEADERS += \
HumanDetection.h \
ScopeSemaphoreExit.h \
FaceReconitionHandle.h \
AlgorithmTaskManage.h \
BaseAlgorithm.h \
MqttSubscriber.h
#FORMS += \
# mainwindow.ui
......
...@@ -7,6 +7,7 @@
#include "VidesData.h"
#include "MediaFaceImage.h"
#include "AlgorithmTaskManage.h"
#include "MqttSubscriber.h"
#include <algorithm>
#include <QString>
#include <QTextCodec>
...@@ -40,10 +41,10 @@ public:
void initFaceFaceRecognition();
void initCameras(vides_data::cameraParameters &parameter, vides_data::responseConfig &devConfig, const std::list<vides_data::responseArea>&areas,std::list<vides_data::requestCameraInfo>&camera_info_list);
__uint8_t intToUint8t(bool faceAlgorithm,bool licensePlateAlgorithm,bool uniformAlgorithm);
// device (box) parameter update
void divParameterUpdate(vides_data::responseConfig &cloudConfig );
static MainWindow * sp_this;
...@@ -62,11 +63,13 @@ public:
void findLocalSerialNumber(QString &serialNumber);
void initDevConfigSyn(CameraHandle *cameraHandle,vides_data::responseConfig &devConfig);
void initRecordingToString(QString &recorJson);
void initDeviceEncodeToString(vides_data::responseConfig &source, QString &targetCodeJson);
void initEncodeToString(QString &enCodeJson);
void clearOfflineCameraHandle(QString sDevId, int nDevPort);
...@@ -89,8 +92,6 @@ private slots:
void clearHandle(QString sDevId, int nDevPort);
void deleteMkvFileTimer();
void handleMatNewConnection();
private:
//Ui::MainWindow *ui;
...@@ -99,8 +100,6 @@ private:
QTimer *deleteLogFileTimer;
QTimer *deleteFrameFileTimer;
QTimer*dePermissionSynTimer;
QTcpServer server;
...@@ -116,5 +115,10 @@ private:
std::map<QString,CameraHandle*>faceDetectionParkingPushs;
vides_data::responseConfig config;
vides_data::MqttConfig mqttConfig;
};
#endif // MAINWINDOW_H