Commit 9a1411ee by “liusq”

新增mqtt和增加调度器日志

parent 381449c0
#include "AlgorithmTaskManage.h"
AlgorithmTaskManage::AlgorithmTaskManage(int humanDetectionLen,int licensePlateLen,int faceLen ):semaphore(humanDetectionLen),
plateSemaphore(licensePlateLen),faceSemaphore(faceLen){
this->humanDetectionLen=humanDetectionLen;
this->licensePlateLen=licensePlateLen;
this->faceLen=faceLen;
AlgorithmTaskManage::AlgorithmTaskManage():isShuttingDown(false){
}
void AlgorithmTaskManage::initialize(int humanDetectionLen, int licensePlateLen, int faceLen,bool first,__uint8_t algorithmPermissions){
if(first){
this->humanDetectionLen=humanDetectionLen;
this->licensePlateLen=licensePlateLen;
this->faceLen=faceLen;
semaphore =new QSemaphore(humanDetectionLen);
plateSemaphore =new QSemaphore(licensePlateLen);
faceSemaphore = new QSemaphore(faceLen);
}else{
Common & instace= Common::getInstance();
if (algorithmPermissions == 0x00) {
this->humanDetectionLen=humanDetectionLen;
if(semaphore!=nullptr){
instace.deleteObj(semaphore);
}
semaphore =new QSemaphore(humanDetectionLen);
}
if(algorithmPermissions == 0x01){
this->licensePlateLen=licensePlateLen;
if(plateSemaphore!=nullptr){
instace.deleteObj(plateSemaphore);
}
plateSemaphore =new QSemaphore(licensePlateLen);
}
if(algorithmPermissions ==0x02){
this->faceLen=faceLen;
if(faceSemaphore!=nullptr){
instace.deleteObj(faceSemaphore);
}
faceSemaphore = new QSemaphore(faceLen);
}
}
}
void AlgorithmTaskManage::initHumanDetectionManage(const QString &modelPaths,
float carShapeConfidence,float &height_reference,int &uniformColor) {
float carShapeConfidence,int &uniformColor) {
for (int i = 0; i < humanDetectionLen; ++i) {
HumanDetection* human=new HumanDetection(modelPaths,carShapeConfidence);
human->setHuManParameter(height_reference,uniformColor);
human->setHuManParameter(uniformColor);
humanDetections.emplace_back(human);
}
}
......@@ -28,12 +61,15 @@ void AlgorithmTaskManage::initFaceReconitionHandle(std::map<QString,QString>&map
for (int i = 0; i < faceLen; ++i) {
FaceReconitionHandle *faceReconitionHandle= new FaceReconitionHandle();
faceReconitionHandle->initSourceImageMap(maps,numberFaces,confidence);
faceReconitionHandles.emplace_back(faceReconitionHandle);
}
}
void AlgorithmTaskManage::modifyImageFeature(std::map<QString,QString>&maps,int numberFaces,float confidence,bool isNull){
if (isShuttingDown.load(std::memory_order_acquire)) {
return ;
}
std::lock_guard<std::mutex> lock(mtxFace);
for (FaceReconitionHandle* face : faceReconitionHandles) {
face->setImageChanged(true);
......@@ -59,6 +95,44 @@ AlgorithmTaskManage::~AlgorithmTaskManage(){
}
}
// Rebuild the selected algorithm worker pools with new parameters.
// Bits in `algorithmPermissions` choose which pools to rebuild:
//   bit 2 (0x04) -> human/uniform detection
//   bit 1 (0x02) -> face recognition
//   bit 0 (0x01) -> license plate recognition
// While this runs, isShuttingDown makes the execute* entry points bail out
// early; the ScopeSemaphoreExit guard clears the flag on every return path.
// NOTE(review): parameter name `odelPaths` looks like a typo of `modelPaths`
// (the header declares the same name) — confirm before renaming.
void AlgorithmTaskManage::releaseResources(
    int newHumanDetectionLen, int newLicensePlateLen, int newFaceLen,const QString &odelPaths,
    float humanCarShapeConfidence,
    int uniformColor,
    float licensePlateCarConfidence,
    std::map<QString,QString>& faceMaps,
    int numberFaces,
    float faceConfidence,
    __uint8_t algorithmPermissions) {
    Common & instance = Common::getInstance();
    // Block new scheduling while pools are torn down and rebuilt.
    isShuttingDown.store(true, std::memory_order_release);
    ScopeSemaphoreExit guard([this]() {
        isShuttingDown.store(false, std::memory_order_release);
    });
    qInfo()<<"修改参数:releaseResources "<<algorithmPermissions;
    // Uniform (work-clothes) detection parameter update.
    if ((algorithmPermissions & 0x01 << 2) != 0) {
        // Drain the semaphore, delete idle workers, recreate the semaphore
        // (selector 0x00 picks the human-detection pool in initialize()),
        // then rebuild the worker objects.
        resetSemaphoreAndClearObjects(instance,semaphore, humanDetections, humanDetectionLen);
        initialize(newHumanDetectionLen, newLicensePlateLen, newFaceLen, false, 0x00);
        initHumanDetectionManage(odelPaths, humanCarShapeConfidence, uniformColor);
    }
    // Face recognition parameter update (selector 0x02).
    if ((algorithmPermissions & 0x01 << 1) != 0) {
        resetSemaphoreAndClearObjects(instance,faceSemaphore, faceReconitionHandles, faceLen);
        initialize(newHumanDetectionLen, newLicensePlateLen, newFaceLen, false, 0x02);
        initFaceReconitionHandle(faceMaps, numberFaces, faceConfidence);
    }
    // License plate parameter update (selector 0x01).
    if ((algorithmPermissions & 0x01) != 0) {
        resetSemaphoreAndClearObjects(instance,plateSemaphore, licensePlateRecognitions, licensePlateLen);
        initialize(newHumanDetectionLen, newLicensePlateLen, newFaceLen, false, 0x01);
        initLicensePlateManage(odelPaths, licensePlateCarConfidence);
    }
}
void* AlgorithmTaskManage::schedulingAlgorithm(int scheType) {
if (scheType == 0x01) {
return schedulingAlgorithmTemplate(humanDetections, mtxHuman);
......@@ -73,9 +147,12 @@ void* AlgorithmTaskManage::schedulingAlgorithm(int scheType) {
}
void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces,QString sSn){
faceSemaphore.acquire();
if (isShuttingDown.load(std::memory_order_acquire)) {
return ;
}
faceSemaphore->acquire();
ScopeSemaphoreExit guard([this]() {
faceSemaphore.release(); // 释放信号量
faceSemaphore->release(); // 释放信号量
});
auto* selectedFaceReconition = static_cast<FaceReconitionHandle*>(schedulingAlgorithm(0x03));
if (selectedFaceReconition!=nullptr && !selectedFaceReconition->getImageChanged()) {
......@@ -84,7 +161,7 @@ void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,s
qInfo() << "人脸识别算法抢到===>sn"<<sSn<<selectedFaceReconition;
selectedFaceReconition->doesItExistEmployee(source, faces);
} else {
qInfo() << "没有可用的LicensePlateRecognition对象可以调度";
qInfo() << "没有可用的selectedFaceReconition对象可以调度";
return ;
}
}
......@@ -92,9 +169,12 @@ void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,s
void AlgorithmTaskManage::executeFindlicensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime,QString sSn){
plateSemaphore.acquire();
if (isShuttingDown.load(std::memory_order_acquire)) {
return ;
}
plateSemaphore->acquire();
ScopeSemaphoreExit guard([this]() {
plateSemaphore.release(); // 释放信号量
plateSemaphore->release(); // 释放信号量
});
auto* selectedLicensePlate = static_cast<LicensePlateRecognition*>(schedulingAlgorithm(0x02));
if (selectedLicensePlate!=nullptr) {
......@@ -103,24 +183,27 @@ void AlgorithmTaskManage::executeFindlicensePlateNumber(const cv::Mat &source, Q
qInfo() << "车牌调度算法抢到===>sn"<<sSn<<selectedLicensePlate;
selectedLicensePlate->licensePlateNumber(source, lpNumber,plate, currentTime);
} else {
qInfo() << "没有可用的LicensePlateRecognition对象可以调度";
qInfo() << "没有可用的selectedLicensePlate对象可以调度";
return ;
}
}
int AlgorithmTaskManage::executeFindHuManCar(const cv::Mat &source, int res,
std::vector<vides_data::ParkingArea> &currentPlate,std::map<int,int>&resMap,QString sSn) {
semaphore.acquire();
std::vector<vides_data::ParkingArea> &currentPlate,std::map<int,int>&resMap,QString sSn,float & heightReference) {
if (isShuttingDown.load(std::memory_order_acquire)) {
return -2;
}
semaphore->acquire();
ScopeSemaphoreExit guard([this]() {
semaphore.release(); // 释放信号量
semaphore->release(); // 释放信号量
});
auto* selectedHumanDetection = static_cast<HumanDetection*>(schedulingAlgorithm(0x01));
if (selectedHumanDetection!=nullptr) {
if (selectedHumanDetection != nullptr ) {
selectedHumanDetection->setIsRunning(true);
// 调用选定对象的findHuManCar函数
qInfo() << "人形调度算法抢到===>sn"<<sSn<<"res"<<selectedHumanDetection;
int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, currentPlate);
int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, heightReference,currentPlate);
return detectionResult;
} else {
qInfo() << "没有可用的HumanDetection对象可以调度";
......
......@@ -14,35 +14,48 @@
#include <mutex>
class AlgorithmTaskManage{
public:
AlgorithmTaskManage(int humanDetectionLen,int licensePlateLen,int faceLen);
AlgorithmTaskManage();
~AlgorithmTaskManage();
static AlgorithmTaskManage& getInstance(int humanDetectionLen,int licensePlateLen,int faceLen)
static AlgorithmTaskManage& getInstance()
{
static AlgorithmTaskManage instance(humanDetectionLen,licensePlateLen,faceLen);
static AlgorithmTaskManage instance;
return instance;
}
void initialize(int humanDetectionLen, int licensePlateLen, int faceLen,bool first,__uint8_t algorithmPermissions );
void initHumanDetectionManage(const QString &modelPaths,
float carShapeConfidence,float &height_reference,int &uniformColor);
float carShapeConfidence,int &uniformColor);
void initLicensePlateManage(const QString &modelPaths,
float carConfidence);
void modifyImageFeature(std::map<QString,QString>&maps,int numberFaces,float confidence,bool isNull);
void initFaceReconitionHandle(std::map<QString,QString>&maps,int numberFaces,float confidence);
void *schedulingAlgorithm(int scheType);
void releaseResources(int newHumanDetectionLen, int newLicensePlateLen, int newFaceLen, const QString &odelPaths,
float humanCarShapeConfidence,
int uniformColor,
float licensePlateCarConfidence,
std::map<QString,QString>& faceMaps,
int numberFaces,
float faceConfidence,
__uint8_t algorithmPermissions);
int executeFindHuManCar(const cv::Mat &source,int res,std::vector<vides_data::ParkingArea> &currentPlate,
std::map<int,int>&resMap, QString sSn);
std::map<int,int>&resMap, QString sSn,float & heightReference);
void executeFindlicensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime,QString sSn);
void executeFindDoesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces,QString sSn);
private:
template<typename T>
T* schedulingAlgorithmTemplate(std::vector<T*>& objects, std::mutex& mtx) {
......@@ -50,7 +63,6 @@ private:
qint64 currentTime = QDateTime::currentMSecsSinceEpoch();
qint64 maxWaitTime = 0;
std::vector<T*> schedulableObjects;
for (T* obj : objects) {
if (obj->getIsRunning()) continue;
qint64 waitTime = std::abs(currentTime - obj->getThreadTime());
......@@ -65,45 +77,65 @@ private:
if (schedulableObjects.empty()) {
return nullptr;
}
if (schedulableObjects.size() == 1) {
return schedulableObjects.at(0);
T* selected = schedulableObjects.at(0);
selected->setIsRunning(true); // 立刻标记为正在运行
return selected;
}
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
return schedulableObjects[dis(gen)];
T* selected = schedulableObjects[dis(gen)];
selected->setIsRunning(true); // 立刻标记为正在运行
return selected;
}
template<typename T>
void resetSemaphoreAndClearObjects(Common& instance, QSemaphore*& semaphore, std::vector<T*>& containers, int len) {
if (semaphore != nullptr) {
semaphore->acquire(len);
for (auto obj : containers) {
do {
if (!obj->getIsRunning()) {
instance.deleteObj(obj);
break;
}
} while (obj->getIsRunning());
}
containers.clear();
semaphore->release(len);
instance.deleteObj(semaphore);
}
}
static AlgorithmTaskManage* instance;
std::vector<HumanDetection*>humanDetections;
std::vector<LicensePlateRecognition*>licensePlateRecognitions;
std::vector<FaceReconitionHandle*>faceReconitionHandles;
int humanDetectionLen;
int licensePlateLen;
int faceLen;
QSemaphore semaphore;
QSemaphore plateSemaphore;
QSemaphore faceSemaphore;
QSemaphore *semaphore;
QSemaphore *plateSemaphore;
QSemaphore *faceSemaphore;
std::mutex mtxHuman;
std::mutex mtxLicense;
std::mutex mtxFace;
std::atomic<bool> isShuttingDown;
};
#endif // HUMANDETECTIONMANAGE_H
#include "BaseAlgorithm.h"
// Base class for schedulable algorithm workers: starts idle with a zeroed
// scheduling timestamp (isRunning is initialized in-class to false).
BaseAlgorithm::BaseAlgorithm():thread_time(0){
}
// Virtual destructor — nothing to release at this level.
BaseAlgorithm::~BaseAlgorithm(){
}
// Returns the worker's last-scheduled timestamp, read with acquire ordering.
qint64 BaseAlgorithm::getThreadTime()const{
    const qint64 stamp = thread_time.load(std::memory_order_acquire);
    return stamp;
}
// Returns the busy flag; acquire load pairs with the release store in
// setIsRunning().
bool BaseAlgorithm::getIsRunning()const{
    const bool busy = isRunning.load(std::memory_order_acquire);
    return busy;
}
void BaseAlgorithm::setIsRunning(bool running) {
this->isRunning.store(running, std::memory_order_release);
}
#ifndef BASEALGORITHM_H
#define BASEALGORITHM_H
#include <QDateTime>
#include <atomic>
// Common base for schedulable algorithm workers (human detection, license
// plate, face recognition). Tracks when the worker was last scheduled and
// whether it is currently busy; both fields are atomics so the scheduler can
// read them without a lock.
class BaseAlgorithm {
protected:
    // Last-scheduled timestamp. Value-initialized in-class for consistency
    // with isRunning — a default-constructed std::atomic holds an
    // indeterminate value before C++20, so don't rely on the ctor alone.
    std::atomic<qint64>thread_time{0};
    // True while a scheduler has claimed this worker.
    std::atomic<bool> isRunning{false};
public:
    BaseAlgorithm();
    // Virtual: instances are managed polymorphically by the task manager.
    virtual ~BaseAlgorithm();
    qint64 getThreadTime()const;
    bool getIsRunning()const;
    void setIsRunning(bool running);
};
#endif // BASEALGORITHM_H
......@@ -7,7 +7,7 @@ CameraHandle::CameraHandle(){
}
CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &channel,
int imageSave)
int imageSave,float &heightReference,vides_data::responseConfig &devConfig)
: hDevice(-1),
url(url),
loginParam(new SXSDKLoginParam()),
......@@ -18,10 +18,13 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch
dev_snap_syn_timer(new QTimer()),
image_save(imageSave),
faceCount(0),
semaphore(1) {
semaphore(1),
heightReference(heightReference),
devConfig(devConfig)
{
connect(this, SIGNAL(afterDownloadFile(int,int,QString)), this, SLOT(pushRecordToCloud(int,int,QString)),Qt::QueuedConnection);
faceMapWorker.setX(0);
faceMapWorker.setY(0);
......@@ -31,17 +34,17 @@ CameraHandle::~CameraHandle() {
Common & instace= Common::getInstance();
dev_snap_syn_timer->stop();
QThreadPool::globalInstance()->waitForDone();
instace.deleteObj(dev_snap_syn_timer);
instace.deleteObj(loginParam);
instace.deleteObj(sxMediaFaceImageReq);
for(auto iter = parkMap.begin(); iter != parkMap.end(); ++iter) {
instace.deleteObj( iter->second);
}
parkMap.clear();
XSDK_DevLogout(this->hDevice);
}
int CameraHandle::sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout) {
......@@ -222,10 +225,54 @@ int CameraHandle::getHdevice() {
return hDevice;
}
// Updates the license-plate confidence window used when filtering
// recognition results for this camera.
void CameraHandle::setCarConfidenceMaxAndMin(float carConfidenceMax,float carConfidenceMin){
    this->carConfidenceMin = carConfidenceMin;
    this->carConfidenceMax = carConfidenceMax;
}
void CameraHandle::getCurrentFrame(std::vector<uchar> &buffer){
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
mediaFaceImage->CameraImage(this->hDevice,this->channel,buffer);
}
// Camera parameter update: apply a freshly fetched cloud configuration to
// this device, only touching settings whose updateAt timestamp changed.
void CameraHandle::cameraParameterUpdate(vides_data::responseConfig &cloudConfig){
    bool forMat=false;
    // Main/extra stream encoding formats: copy new values and remember that a
    // reboot is required for them to take effect.
    if(devConfig.mainFormat.updateAt!=cloudConfig.mainFormat.updateAt){
        devConfig.mainFormat=cloudConfig.mainFormat;
        forMat=true;
    }
    if(devConfig.extraFormat.updateAt!=cloudConfig.extraFormat.updateAt){
        devConfig.extraFormat=cloudConfig.extraFormat;
        forMat=true;
    }
    if(forMat){
        // Reboot while keeping the local handle (isCloseHandle=true makes
        // deviceReboot() return before clearOfflineCameraHandle).
        deviceReboot(true);
    }
    if(devConfig.camera.updateAt!=cloudConfig.camera.updateAt){
        if(devConfig.camera.username!=cloudConfig.camera.username ||
                devConfig.camera.password!=cloudConfig.camera.password ){
            // Credentials changed: drop this handle so the device gets
            // re-logged-in with the new username/password.
            QString ip=QString::fromUtf8(loginParam->sDevId);
            MainWindow::sp_this->clearOfflineCameraHandle(ip,loginParam->nDevPort);
        }else {
            this->heightReference=cloudConfig.camera.heightReference;
            this->image_save=cloudConfig.camera.imageSave;
            if(this->devConfig.camera.devSnapSynTimer!= cloudConfig.camera.devSnapSynTimer){
                dev_snap_syn_timer->stop(); // stop the current timer
                dev_snap_syn_timer->setInterval(cloudConfig.camera.devSnapSynTimer); // apply the new interval
                dev_snap_syn_timer->start(); // restart the timer
            }
        }
        // NOTE(review): devConfig.camera is never overwritten with
        // cloudConfig.camera here (unlike mainFormat/extraFormat above), so
        // its updateAt stays stale and this branch re-runs on every sync —
        // confirm whether that is intended.
    }
    if(this->face_frequency!=cloudConfig.faceConfig.faceFrequency){
        this->face_frequency=cloudConfig.faceConfig.faceFrequency;
    }
    // The license-plate confidence window always follows the cloud values.
    float carConfidenceMax=cloudConfig.licensePlateConfig.carConfidenceMax;
    float carConfidenceMin=cloudConfig.licensePlateConfig.carConfidenceMin;
    this->setCarConfidenceMaxAndMin(carConfidenceMax,carConfidenceMin);
}
void CameraHandle::initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 face_frequency) {
......@@ -235,19 +282,19 @@ void CameraHandle::initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 fa
this->face_frequency=face_frequency;
dev_snap_syn_timer->start(syn_timer);
}
void CameraHandle::sdkRealTimeDevSnapSyn(int hDevice) {
QThreadPool* threadPool = QThreadPool::globalInstance();
auto taskSyn = [this, hDevice]() {
sdkDevSnapSyn(hDevice, this->channel);
};
qInfo() << "当前活跃线程数:" << threadPool->activeThreadCount();
if (threadPool->activeThreadCount() >= threadPool->maxThreadCount()) {
qInfo() << "任务积压,跳过本次执行";
return;
}
auto taskRunnable = new TaskRunnable(taskSyn, hDevice, this->channel, RunFunction::SdkDevSnapSyn);
threadPool->start(taskRunnable);
}
QString CameraHandle::getSSn(){
......@@ -266,6 +313,10 @@ std::map<QString, QString>&CameraHandle::getCurrentData(){
// Accessor: per-id recognition results for this camera, returned by
// reference (callers share the live map).
std::map<int, vides_data::responseRecognitionData>&CameraHandle::getVideoCurrentData(){
    return videoCurrentData;
}
// Accessor: the device configuration currently applied to this camera,
// returned by reference.
vides_data::responseConfig &CameraHandle::getDeviceConfig(){
    return devConfig;
}
void CameraHandle::sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
QString startTimer, QString endTime){
if(mediaHandle>0){
......@@ -373,21 +424,25 @@ void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
qint64 currentTime= QDateTime::currentSecsSinceEpoch();
int ret=mediaFaceImage->FaceImageCallBack(hDevice,nChannel, image);
qDebug() << "SdkDevSnapSyn HTTP POST request to: " << sSn;
if (ret < 0) {
offlineCount++; // 累加计数器
qDebug() << "offlineCount: " << loginParam->sDevId<<offlineCount;
if (offlineCount >= 3) { // 判断是否连续3次返回0
qInfo() << "设备离线";
QString ip=QString::fromUtf8(loginParam->sDevId);
MainWindow::sp_this->clearOfflineCameraHandle(ip,loginParam->nDevPort);
bool is_ping=vides_data::pingAddress(ip);
if(!is_ping){
deviceReboot(false);
}else {
deviceReboot(true);
}
// 执行离线处理逻辑
// TODO: 可以在此处更新设备状态、发送告警通知等
// 重置计数器,以便下次再次检测连续离线
offlineCount = 0;
return;
}
} else {
......@@ -433,18 +488,13 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
}else {
//当前为空,离场
if(newInfo.getLicensePlate().length()<=0){
Common & instace= Common::getInstance();
int humanLen=instace.getHumanDetectionLen();
int licensePlateLen=instace.getLicensePlateLen();
int faceLen=instace.getFaceLen();
AlgorithmTaskManage &algorithmTaskManage=AlgorithmTaskManage::getInstance(humanLen,licensePlateLen,faceLen);
AlgorithmTaskManage &algorithmTaskManage=AlgorithmTaskManage::getInstance();
std::vector<vides_data::ParkingArea> currentPlates;
std::map<int,int>resMap;
int car_size =algorithmTaskManage.executeFindHuManCar(frame,0x01,currentPlates,resMap,sSn);
int car_size =algorithmTaskManage.executeFindHuManCar(frame,0x01,currentPlates,resMap,sSn,heightReference);
qInfo()<<sSn<<":"<<"当前车形数量:"<<car_size;
if (car_size <= 0 && car_size!=-2) {
qInfo() << sSn<<"区域:"<<park->getSpaceIndex() << ": 出场:";
//如果有车辆检测到并且不在停车区域内部,视为出场
......@@ -456,9 +506,6 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
qInfo() << sSn << ": no出场:" << car_size;
}
}else{
qInfo()<<sSn<<":"<<"老车出场:"<<park->getCurrentPlate().getLicensePlate();
qInfo()<<sSn<<":"<<"新车入场:"<<newInfo.getLicensePlate();
//当前不为空,新车,新车入场,老车出场
//exitAndMoMap[Exit]=park->getCurrentPlate();
//exitAndMoMap[Mobilization]=newInfo;
......@@ -484,7 +531,7 @@ void CameraHandle::batchRegionalPushLicensePlate(QByteArray &imgs,qint64 current
void CameraHandle::matToAreaMask(const cv::Mat &source, std::map<int, cv::Mat> &maskFrame) {
Common & instace= Common::getInstance();
for (auto iter = parkMap.begin(); iter != parkMap.end(); ++iter) {
int id = iter->first;
ParkingSpaceInfo* parkArea = iter->second;
......@@ -498,7 +545,7 @@ void CameraHandle::matToAreaMask(const cv::Mat &source, std::map<int, cv::Mat> &
int bottomRightY = instace.clamp(static_cast<int>(parkArea->getArea().bottomRightCornerY), 0, source.rows - 1);
int bottomLeftX = instace.clamp(static_cast<int>(parkArea->getArea().bottomLeftCornerX), 0, source.cols - 1);
int bottomLeftY = instace.clamp(static_cast<int>(parkArea->getArea().bottomLeftCornerY), 0, source.rows - 1);
std::vector<cv::Point> parkAreaPoints = {
cv::Point(topLeftX, topLeftY),
cv::Point(topRightX, topRightY),
......@@ -534,44 +581,35 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
qInfo()<<"=============================>";
int width = frame.cols; // 获取图像宽度
int height = frame.rows; // 获取图像高度
int humanlen=instace.getHumanDetectionLen();
qInfo()<<"frame 宽度:"<<width<<"frame 高度:"<<height;
int licensePlateLen=instace.getLicensePlateLen();
int faceLen=instace.getFaceLen();
AlgorithmTaskManage &algorithmTaskManage=AlgorithmTaskManage::getInstance(humanlen,licensePlateLen,faceLen);
AlgorithmTaskManage &algorithmTaskManage=AlgorithmTaskManage::getInstance();
std::map<QString,vides_data::requestFaceReconition> mapFaces;
QByteArray imgs;
this->matToBase64(frame, imgs);
HttpService httpService(httpUrl);
int faSize =0;
std::vector<vides_data::ParkingArea> currentPlates;
int uniforms=0x00;
std::map<int,int>resMap;
//穿工服算法
if ((algorithmPermissions & 0x01<<2) != 0) {
uniforms=algorithmTaskManage.executeFindHuManCar(frame,0x02,currentPlates,resMap,sSn);
if(currentPlates.size()>0x00){
uniforms=algorithmTaskManage.executeFindHuManCar(frame,0x02,currentPlates,resMap,sSn,heightReference);
if(resMap.size()>0x00){
faSize=resMap.at(0x00);
uniforms=resMap.at(0x02);
}
}else{
//人形
uniforms=algorithmTaskManage.executeFindHuManCar(frame,0x00,currentPlates,resMap,sSn);
if(currentPlates.size()>0x00){
uniforms=algorithmTaskManage.executeFindHuManCar(frame,0x00,currentPlates,resMap,sSn,heightReference);
if(resMap.size()>0x00){
faSize=resMap.at(0x00);
uniforms=faSize;
}
}
qInfo()<<"人脸数量==>"<<faSize;
qInfo()<<"未穿工服数量==>"<<uniforms;
if(uniforms==-2 || faSize==-2){
qInfo() << "没有可用的HumanDetection对象可以调度";
return ;
......@@ -589,18 +627,14 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
qInfo()<<"人数变化推送信息推送失败";
}
instace.deleteObj(resp);
faceMapWorker.setX(faSize);
faceMapWorker.setY(uniforms);
}
}
if(faSize>0 && (algorithmPermissions & 0x01<<1) != 0){
qInfo() << "faceRecognition.doesItExistEmployee Current thread ID: " << QThread::currentThreadId()<<sSn;
std::list<vides_data::faceRecognitionResult>faces;
algorithmTaskManage.executeFindDoesItExistEmployee(frame,faces,sSn);
qInfo()<<"识别的人脸数量==>"<<faces.size();
if (faces.size()>0) {
for(auto face:faces){
vides_data::requestFaceReconition faceReconition;
......@@ -630,7 +664,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
}
}
}
if ((algorithmPermissions & 0x01<<2) != 0) {
if(uniforms>0 ){
//未穿工服的人数
......@@ -704,7 +738,6 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
}
faceCount.fetch_add(1, std::memory_order_relaxed);
qInfo()<<"faceCount==>"<<faceCount.load(std::memory_order_relaxed);
for (auto it = areaMat.begin(); it != areaMat.end(); ++it) {
int key = it->first;
cv::Mat areaMat = it->second;
......@@ -718,12 +751,12 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
}
vides_data::requestLicensePlate resultPlate;
resultPlate.sn=sSn;
algorithmTaskManage.executeFindlicensePlateNumber(areaMat, lpNumber,resultPlate,currentTime,sSn);
std::list<vides_data::LicensePlate>ps =resultPlate.plates;
qInfo()<<QString("sn==>%1,区域:%2识别的车牌信息是:%3").arg(sSn).arg(key).
arg(lpNumber);
arg(lpNumber);
if(ps.size()==0){
int res=-1;
if(value==nullptr){
......@@ -752,7 +785,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
}
vides_data::LicensePlate maxPlate;
LicensePlateRecognition licensePlateRecognitionNew;
licensePlateRecognitionNew.filterLicensePlateConfidenceMax(resultPlate,maxPlate);
RecognizedInfo recognizedInfo;
......@@ -766,7 +799,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
qInfo()<<sSn<<"==>非绿牌蓝牌车牌号:"<<maxPlate.new_plate<<"===>recognition.new_plate.length():"<<maxPlate.new_plate.length();
continue;
}
if(maxPlate.text_confidence>=instace.getCarConfidenceMax()){
if(maxPlate.text_confidence>=carConfidenceMax){
if(value->getQueue().size()>=7 && value->getQueue().size()<=10) {
for (int i = 0; i < 3; ++i) {
value->removeQueue();
......@@ -779,12 +812,12 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
}
this->checkAndUpdateCurrentPlate(value,areaMat,recognizedInfo,res);
}
if(maxPlate.text_confidence<=instace.getCarConfidenceMin()){
qInfo()<<sSn<<"==>recognition.text_confidence<=instace.getCarConfidenceMin"<<instace.getCarConfidenceMin();
if(maxPlate.text_confidence<=carConfidenceMin){
qInfo()<<sSn<<"==>recognition.text_confidence<=instace.getCarConfidenceMin"<<carConfidenceMin;
continue;
}
if(maxPlate.text_confidence>instace.getCarConfidenceMin()
&& maxPlate.text_confidence<instace.getCarConfidenceMax())
if(maxPlate.text_confidence>carConfidenceMin
&& maxPlate.text_confidence<carConfidenceMax)
{
if(value->getQueue().size()>=10) {
value->removeQueue();
......@@ -806,22 +839,10 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
maxPlate.img=imgs;
maxPlate.new_color=recognizedInfo.getColor();
newPlate.plates.push_back(std::move(maxPlate));
// RecognizedInfo exitInfo=exitMoMap[Exit];
// vides_data::LicensePlate oldInfo;
// oldInfo.areaLocation=value->getArea();
// //oldInfo.img=imgs;
// oldInfo.new_color=exitInfo.getColor();
// oldInfo.new_plate=exitInfo.getLicensePlate();
// oldInfo.time=exitInfo.getRecognizeTime();
// newPlate.plates.push_back(std::move(oldInfo));
}
}
}
qDebug()<<QString("%1==>当前车牌数量:%2").arg(sSn).arg(newPlate.plates.size());
if(newPlate.plates.size()>0){
foreach (auto var, newPlate.plates) {
qInfo()<<QString("sn:%1 =>识别的车牌号是:%2").arg(sSn).arg(var.new_plate);
......@@ -889,11 +910,11 @@ void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlat
void CameraHandle::printWifi(XSDK_HANDLE hDevice,XSDK_CFG::NetWork_Wifi &cfg){
char szOutBuffer[4000] = { 0 };
int nInOutSize = sizeof(szOutBuffer);
// 获取并解析配置
int nResult = XSDK_DevGetSysConfigSyn(hDevice, JK_NetWork_Wifi, szOutBuffer, &nInOutSize, 3000, EXCMD_CONFIG_GET);
qDebug()<<szOutBuffer;
if (nResult >= 0) {
cfg.Parse(szOutBuffer);
} else {
......@@ -902,15 +923,15 @@ void CameraHandle::printWifi(XSDK_HANDLE hDevice,XSDK_CFG::NetWork_Wifi &cfg){
}
void CameraHandle::sdkWifi(QString &pwd,QString &ssid){
XSDK_CFG::NetWork_Wifi wif;
printWifi(hDevice,wif);
QByteArray && byPwd = pwd.toUtf8();
const char * cpwd= byPwd.data();
wif.Keys.SetValue(cpwd);
QByteArray && byJson = ssid.toUtf8();
const char * cssid= byJson.data();
wif.SSID.SetValue(cssid);
......@@ -922,9 +943,9 @@ void CameraHandle::sdkWifi(QString &pwd,QString &ssid){
int res =XSDK_DevSetSysConfigSyn(hDevice, JK_NetWork_Wifi, wipCfg, strlen(wipCfg), szOutBuffer, &nLen, 3000, EXCMD_CONFIG_SET);
if(res<0){
qInfo() << "修改wifi失败";
}
deviceReboot();
deviceReboot(false);
}
void CameraHandle::sdkDevSystemTimeZoneSyn(QString &time){
......@@ -964,16 +985,93 @@ void CameraHandle::sdkEncodeCfg(const char* pCfg){
qInfo() << "sdkEncodeCfg 配置编码设置->修改失败"<<res;
}
}
// Syncs the device's GB28181 (SPVMN platform-access) configuration with the
// cloud-provided settings: reads the current config from the device,
// compares it field-by-field, and writes back only when something differs.
void CameraHandle::updateSdkDevSpvMn(vides_data::responseGb28181 *gb28181){
    char szOutBuffer[40960]={ 0 };
    int nInOutSize = sizeof(szOutBuffer);
    // Fetch the current platform-access config from the device.
    int res= XSDK_DevGetSysConfigSyn(hDevice,JK_NetWork_SPVMN,szOutBuffer,&nInOutSize,4000,EXCMD_CONFIG_GET);
    if (res >= 0)
    {
        XSDK_CFG::NetWork_SPVMN config;
        config.Parse(szOutBuffer);
        // Snapshot the device-side values; the const char* results are
        // compared against QString fields in `isEqual` below.
        const char* Camreaid = config.Camreaid.ToString();
        int iHsIntervalTime = config.iHsIntervalTime.ToInt();
        int iRsAgedTime = config.iRsAgedTime.ToInt();
        int sCsPort = config.sCsPort.ToInt();
        const char* szConnPass = config.szConnPass.ToString();
        const char* szCsIP = config.szCsIP.ToString();
        const char* szDeviceNO = config.szDeviceNO.ToString();
        const char* szServerDn = config.szServerDn.ToString();
        const char* szServerNo = config.szServerNo.ToString();
        // True when the device already matches the cloud config exactly.
        bool isEqual = (szCsIP == gb28181->sip_ip &&
                        sCsPort == gb28181->sip_port &&
                        szServerNo == gb28181->serial &&
                        szServerDn ==gb28181->realm &&
                        iRsAgedTime == gb28181->register_validity &&
                        iHsIntervalTime == gb28181->heartbeat_interval &&
                        szConnPass == gb28181->password &&
                        szDeviceNO == gb28181->device_id &&
                        Camreaid == gb28181->channel_id);
        if(!isEqual){
            // Rebuild the 64-entry channel-id table: slot 1 takes the cloud
            // channel_id, the remaining slots get synthesized ids
            // ("3402000000131000001" + two-digit index).
            config.Camreaid.InitArraySize(64);
            for (int i = 1; i <= 64; ++i) {
                if (i == 1) {
                    QByteArray b_StrValue = gb28181->channel_id.toUtf8();
                    const char* str_Value = b_StrValue.constData();
                    JStrObj* newCameraId = &config.Camreaid[i - 1];
                    newCameraId->operator=(str_Value);
                } else {
                    QString str("3402000000131000001" + QString::number(i, 10).rightJustified(2, '0'));
                    QByteArray b_Str = str.toUtf8();
                    const char* s_ct = b_Str.constData();
                    JStrObj* newCameraId = &config.Camreaid[i - 1];
                    newCameraId->operator=(s_ct);
                }
            }
            // Copy each cloud field into the config object. Each QByteArray
            // temporary is kept in a named rvalue reference so its data()
            // pointer remains valid for the matching SetValue call.
            QByteArray && bSip_ip= gb28181->sip_ip.toUtf8();
            char* sip_ip = bSip_ip.data();
            config.szCsIP.SetValue(sip_ip);
            QByteArray && bSzServerNo=gb28181->serial.toUtf8();
            char* sz_ServerNo = bSzServerNo.data();
            config.szServerNo.SetValue(sz_ServerNo);
            config.sCsPort.SetValue(gb28181->sip_port);
            // Local SIP/UDP port is hard-coded to 5060.
            config.sUdpPort.SetValue(5060);
            QByteArray && bSzServerDn=gb28181->realm.toUtf8();
            char* sz_ServerDn = bSzServerDn.data();
            config.szServerDn.SetValue(sz_ServerDn);
            config.bCsEnable.SetValue(true);
            config.iRsAgedTime.SetValue(gb28181->register_validity);
            config.iHsIntervalTime.SetValue(gb28181->heartbeat_interval);
            QByteArray && bSzConnPass=gb28181->password.toUtf8();
            char* sz_connPass = bSzConnPass.data();
            config.szConnPass.SetValue(sz_connPass);
            QByteArray && bDevice_id=gb28181->device_id.toUtf8();
            char* cdevice_id = bDevice_id.data();
            config.szDeviceNO.SetValue(cdevice_id);
            // Serialize and push the updated config down to the device.
            const char* pCfg = config.ToString();
            sdkDevSpvMn(pCfg);
        }
    }
}
// Pushes a serialized GB28181 (SPVMN) configuration string to the device.
void CameraHandle::sdkDevSpvMn(const char *spvMn){
    qDebug() << spvMn;
    char reply[512] = { 0 };
    int replyLen = sizeof(reply);
    const int rc = XSDK_DevSetSysConfigSyn(hDevice, JK_NetWork_SPVMN, spvMn, strlen(spvMn),
                                           reply, &replyLen, 3000, EXCMD_CONFIG_SET);
    if (rc < 0) {
        qInfo() << "sdkDevSpvMn 28181->修改失败" << rc;
    }
}
void CameraHandle::deviceReboot(){
void CameraHandle::deviceReboot(bool isCloseHandle){
int nRet=0;
XSDK_CFG::OPMachine cfg;
cfg.Action.SetValue("Reboot");
......@@ -984,8 +1082,13 @@ void CameraHandle::deviceReboot(){
qInfo() << sSn<<"重启相机失败"<<nRet;
return ;
}
if(isCloseHandle){
return ;
}
QString ip=QString::fromUtf8(loginParam->sDevId);
MainWindow::sp_this->clearOfflineCameraHandle(ip,loginParam->nDevPort);}
MainWindow::sp_this->clearOfflineCameraHandle(ip,loginParam->nDevPort);
}
bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2) {
......@@ -1055,50 +1158,50 @@ void CameraHandle::faceUniformOverlap(std::map<QString, vides_data::requestFaceR
std::vector<vides_data::ParkingArea>& uniforms,
std::list<QString>& outUniforms) {
const float epsilon = 1e-5;
for (size_t i = 0; i < uniforms.size(); ++i) {
std::vector<cv::Point2f> uniformAreaPoints = {
cv::Point2f(uniforms[i].topLeftCornerX, uniforms[i].topLeftCornerY),
cv::Point2f(uniforms[i].topRightCornerX, uniforms[i].topRightCornerY),
cv::Point2f(uniforms[i].bottomRightCornerX, uniforms[i].bottomRightCornerY),
cv::Point2f(uniforms[i].bottomLeftCornerX, uniforms[i].bottomLeftCornerY)
};
if (!isClockwise(uniformAreaPoints)) {
std::reverse(uniformAreaPoints.begin(), uniformAreaPoints.end());
}
float maxIntersectionArea = 0.0;
QString maxFaceId;
for (auto iter = mapFaces.begin(); iter != mapFaces.end(); ++iter) {
QString faceId = iter->first; // 人员id
vides_data::requestFaceReconition faceValue = iter->second;
std::vector<cv::Point2f> faceAreaPoints = {
cv::Point2f(faceValue.area.top_left_corner_x, faceValue.area.top_left_corner_y),
cv::Point2f(faceValue.area.top_right_corner_x, faceValue.area.top_right_corner_y),
cv::Point2f(faceValue.area.bottom_right_corner_x, faceValue.area.bottom_right_corner_y),
cv::Point2f(faceValue.area.bottom_left_corner_x, faceValue.area.bottom_left_corner_y)
};
if (!isClockwise(faceAreaPoints)) {
std::reverse(faceAreaPoints.begin(), faceAreaPoints.end());
}
std::vector<cv::Point2f> intersection;
float intersectionArea = cv::intersectConvexConvex(uniformAreaPoints, faceAreaPoints, intersection, true);
if (intersectionArea > maxIntersectionArea) {
maxIntersectionArea = intersectionArea;
maxFaceId = faceId;
}
}
if (!maxFaceId.isEmpty() && maxIntersectionArea > epsilon) {
outUniforms.push_back(maxFaceId);
}
}
for (size_t i = 0; i < uniforms.size(); ++i) {
std::vector<cv::Point2f> uniformAreaPoints = {
cv::Point2f(uniforms[i].topLeftCornerX, uniforms[i].topLeftCornerY),
cv::Point2f(uniforms[i].topRightCornerX, uniforms[i].topRightCornerY),
cv::Point2f(uniforms[i].bottomRightCornerX, uniforms[i].bottomRightCornerY),
cv::Point2f(uniforms[i].bottomLeftCornerX, uniforms[i].bottomLeftCornerY)
};
if (!isClockwise(uniformAreaPoints)) {
std::reverse(uniformAreaPoints.begin(), uniformAreaPoints.end());
}
float maxIntersectionArea = 0.0;
QString maxFaceId;
for (auto iter = mapFaces.begin(); iter != mapFaces.end(); ++iter) {
QString faceId = iter->first; // 人员id
vides_data::requestFaceReconition faceValue = iter->second;
std::vector<cv::Point2f> faceAreaPoints = {
cv::Point2f(faceValue.area.top_left_corner_x, faceValue.area.top_left_corner_y),
cv::Point2f(faceValue.area.top_right_corner_x, faceValue.area.top_right_corner_y),
cv::Point2f(faceValue.area.bottom_right_corner_x, faceValue.area.bottom_right_corner_y),
cv::Point2f(faceValue.area.bottom_left_corner_x, faceValue.area.bottom_left_corner_y)
};
if (!isClockwise(faceAreaPoints)) {
std::reverse(faceAreaPoints.begin(), faceAreaPoints.end());
}
std::vector<cv::Point2f> intersection;
float intersectionArea = cv::intersectConvexConvex(uniformAreaPoints, faceAreaPoints, intersection, true);
if (intersectionArea > maxIntersectionArea) {
maxIntersectionArea = intersectionArea;
maxFaceId = faceId;
}
}
if (!maxFaceId.isEmpty() && maxIntersectionArea > epsilon) {
outUniforms.push_back(maxFaceId);
}
}
}
......@@ -1158,9 +1261,9 @@ int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea) {
std::reverse(currentPolygonPoints.begin(), currentPolygonPoints.end());
}
qDebug() << "Current Polygon Points:";
qInfo() << "Current Polygon Points:";
for (const auto& point : currentPolygonPoints) {
qDebug() << "(" << point.x << ", " << point.y << ")";
qInfo() << "(" << point.x << ", " << point.y << ")";
}
for (ParkingSpaceInfo *info : parkingSpaceInfos) {
......@@ -1176,9 +1279,9 @@ int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea) {
std::reverse(polygonInfoPoints.begin(), polygonInfoPoints.end());
}
qDebug() << "Polygon Info Points for Space " << info->getSpaceIndex() << ":";
qInfo() << "Polygon Info Points for Space " << info->getSpaceIndex() << ":";
for (const auto& point : polygonInfoPoints) {
qDebug() << "(" << point.x << ", " << point.y << ")";
qInfo() << "(" << point.x << ", " << point.y << ")";
}
std::vector<cv::Point2f> intersection;
......@@ -1299,3 +1402,4 @@ void CameraHandle::updateParkMapAndParkingSpaceInfos(const std::list<vides_data:
parkMap.clear();
initParkingSpaceInfo(newAreas);
}
......@@ -40,7 +40,8 @@ enum CAR_INFORMATION {
class CameraHandle: public QObject {
Q_OBJECT
public:
CameraHandle(QString &url,QString &httpUrl,QString &sSn, int &channel, int imageSave);
CameraHandle(QString &url,QString &httpUrl,QString &sSn, int &channel, int imageSave,
float &heightReference,vides_data::responseConfig &devConfig);
CameraHandle();
~CameraHandle();
int sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
......@@ -55,6 +56,9 @@ public:
void clearCameraHandle();
// void rebindTimer(int hDevice);
//相机参数更新
void cameraParameterUpdate(vides_data::responseConfig &cloudConfig);
void initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 face_frequency);
void notificationUpdateImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence);
......@@ -79,6 +83,8 @@ public:
void printWifi(XSDK_HANDLE hDevice,XSDK_CFG::NetWork_Wifi &cfg);
void setCarConfidenceMaxAndMin(float carConfidenceMax,float carConfidenceMin);
//设置相机连接的wifi
void sdkWifi(QString &pwd,QString &ssid);
//时间设置
......@@ -89,8 +95,10 @@ public:
void sdkEncodeCfg(const char *enCode);
//28181更新
void sdkDevSpvMn(const char* spvMn);
void updateSdkDevSpvMn(vides_data::responseGb28181 *gb28181);
//重启设备
void deviceReboot();
void deviceReboot(bool isCloseHandle );
//获取固件版本
void findFirmwareVersion(QString &firmwareVersion);
......@@ -106,7 +114,8 @@ public:
std::list<QString>&outUniforms);
bool isClockwise(const std::vector<cv::Point2f>& polygon);
QString getSSn();
int getMediaHandle();
void setMediaHandle(int mediaHandle);
......@@ -122,6 +131,8 @@ public:
std::map<QString, QString>&getCurrentData();
vides_data::responseConfig &getDeviceConfig();
bool isChanged(const QPoint& newInfo, const QPoint& current);
// 检查点是否在多边形内
bool polygonsOverlap(ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2);
......@@ -181,6 +192,14 @@ private :
uint64 face_frequency;
__uint8_t algorithmPermissions;
vides_data::responseConfig devConfig;
float heightReference;
float carConfidenceMax;
float carConfidenceMin;
};
......
......@@ -62,35 +62,6 @@ void Common::setImages(QString images){
images.append("/");
this->images=images;
}
float Common::getCarConfidenceMax() const{
return carConfidenceMax;
}
void Common::setCarConfidenceMax(float carConfidenceMax){
this->carConfidenceMax=carConfidenceMax;
}
float Common::getCarConfidenceMin() const{
return carConfidenceMin;
}
void Common::setCarConfidenceMin(float carConfidenceMin){
this->carConfidenceMin=carConfidenceMin;
}
int Common::getHumanDetectionLen() const{
return humanDetectionLen;
}
int Common::getLicensePlateLen() const{
return licensePlateLen;
}
int Common::getFaceLen() const{
return faceLen;
}
void Common::setTaskManage(int humanDetectionLen,int licensePlateLen,int faceLen){
this->humanDetectionLen=humanDetectionLen;
this->licensePlateLen=licensePlateLen;
this->faceLen=faceLen;
}
QString Common::GetLocalIp() {
QString ipAddress;
QList<QHostAddress> list = QNetworkInterface::allAddresses();
......
......@@ -44,17 +44,6 @@ public:
QString GetLocalIp();
float getCarConfidenceMax() const;
void setCarConfidenceMax(float carConfidenceMax);
float getCarConfidenceMin() const;
void setCarConfidenceMin(float carConfidenceMin);
int getHumanDetectionLen() const;
int getLicensePlateLen() const;
int getFaceLen() const;
void setTaskManage(int humanDetectionLen,int licensePlateLen,int faceLen);
template <typename T>
const T& clamp(const T& v, const T& lo, const T& hi)
{
......@@ -72,11 +61,6 @@ private:
QString videoOut;
QString videoDownload;
QString images;
float carConfidenceMax;
float carConfidenceMin;
int humanDetectionLen;
int licensePlateLen;
int faceLen;
Common();
~Common();
......
......@@ -11,27 +11,19 @@ using namespace cimg_library;
// Default constructor: acquires no SDK resources here
// (ctxHandle stays at its in-class nullptr default).
FaceReconitionHandle::FaceReconitionHandle() {
}
// Releases the face-SDK context if one was created; the static counter
// logs how many handles have been destroyed (debug aid for this commit's
// scheduler logging).
FaceReconitionHandle::~FaceReconitionHandle(){
    static int i=0;
    if (ctxHandle != nullptr) {
        HF_ReleaseFaceContext(ctxHandle);
        qInfo()<<"人脸析构"<<++i;  // "face destructed" + running count
        ctxHandle = nullptr;  // prevent double release
    }
}
// Returns the last-run timestamp with acquire ordering, pairing with the
// release store performed by the worker thread.
qint64 FaceReconitionHandle::getThreadTime() const{
    const qint64 lastRun = thread_time.load(std::memory_order_acquire);
    return lastRun;
}
bool FaceReconitionHandle::getIsRunning() const{
return isRunning.load(std::memory_order_acquire);
}
void FaceReconitionHandle::setIsRunning(bool running){
this->isRunning.store(running, std::memory_order_release);
}
// Reports whether the source image set has been flagged as changed (acquire load).
bool FaceReconitionHandle::getImageChanged()const{
    const bool changed = isImageChanged.load(std::memory_order_acquire);
    return changed;
}
......@@ -48,7 +40,7 @@ cv::Mat FaceReconitionHandle::loadImage(const QString &path) {
qDebug() << "图像以OpenCV成功加载。";
return image;
}
return loadImageFromByteStream(path);
}
......@@ -66,7 +58,7 @@ void FaceReconitionHandle::initSourceImageMap(std::map<QString,QString>&maps,int
QString bPath = QCoreApplication::applicationDirPath() + "/model_zip/T1_5";
#else
#error "不支持的架构"
#endif
QByteArray && bypath = bPath.toUtf8();
char* spath = bypath.data();
......@@ -84,7 +76,7 @@ void FaceReconitionHandle::initSourceImageMap(std::map<QString,QString>&maps,int
customIds.clear();
int i = 0;
qInfo()<< "加载图像size: "<<maps.size();
for (auto it = maps.begin(); it != maps.end(); ++it,++i) {
const QString& key = it->first;
QString& value = it->second;
......@@ -99,7 +91,7 @@ void FaceReconitionHandle::initSourceImageMap(std::map<QString,QString>&maps,int
imageData.width = image.cols;
imageData.rotation = VIEW_ROTATION_0;
imageData.format = FORMAT_BGR;
HImageHandle imageSteamHandle;
ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
this->configConfidence=confidence;
......@@ -108,25 +100,25 @@ void FaceReconitionHandle::initSourceImageMap(std::map<QString,QString>&maps,int
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
HF_MultipleFaceData multipleFaceData = {0};
HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
if (multipleFaceData.detectedNum <= 0) {
qInfo() << QString("initSourceImageMap:未检测到人脸: %1").arg(key);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
HF_FaceFeature feature = {0};
ret = HF_FaceFeatureExtract(ctxHandle, imageSteamHandle, multipleFaceData.tokens[0], &feature);
if (ret != HSUCCEED) {
qInfo() << QString("特征提取出错: %1").arg(ret);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
char* tagName = new char[key.size() + 1];
std::strcpy(tagName, key.toStdString().c_str());
HF_FaceFeatureIdentity identity = {0};
......@@ -134,16 +126,16 @@ void FaceReconitionHandle::initSourceImageMap(std::map<QString,QString>&maps,int
identity.customId = i;
customIds.push_back( identity.customId);
identity.tag = tagName;
ret = HF_FeaturesGroupInsertFeature(ctxHandle, identity);
if (ret != HSUCCEED) {
qInfo() << QString("插入失败: %1").arg(ret);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
delete[] tagName;
ret = HF_ReleaseImageStream(imageSteamHandle);
if (ret == HSUCCEED) {
imageSteamHandle = nullptr;
......@@ -167,18 +159,18 @@ void FaceReconitionHandle::featureRemove(){
cv::Mat FaceReconitionHandle::loadImageFromByteStream(const QString& filePath) {
try {
// 使用 CImg 读取 JPEG 图像
QByteArray bPath =filePath.toUtf8();
const char* ctr=bPath.data();
CImg<unsigned char> cimg_image(ctr);
// 将 CImg 对象转换为 OpenCV 的 Mat 格式
int width = cimg_image.width();
int height = cimg_image.height();
cv::Mat mat(height, width, CV_8UC3);
cimg_forXY(cimg_image, x, y) {
// 注意OpenCV默认是BGR顺序
// CImg中像素的存取方式是 (x, y, z, c) 其中c是颜色通道
......@@ -194,7 +186,7 @@ cv::Mat FaceReconitionHandle::loadImageFromByteStream(const QString& filePath) {
qDebug() << "OpenCV Error: " << e.what();
return cv::Mat(); ;
}
return cv::Mat();
}
......@@ -213,10 +205,10 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
imageData.width = source.cols;
imageData.rotation = VIEW_ROTATION_0;
imageData.format = FORMAT_BGR;
HImageHandle imageSteamHandle;
ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
if (ret != HSUCCEED) {
qInfo()<<QString("image handle error:%1").arg((long) imageSteamHandle,0,10);
......@@ -224,12 +216,12 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
}
HF_MultipleFaceData multipleFaceData = {0};
HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
if (multipleFaceData.detectedNum <= 0) {
qDebug()<<QString("search 未检测到人脸");
return ;
}
std::vector<std::vector<float>> features;
// 被搜索的目标这边推荐使用拷贝式的接口来获取特征向量
HInt32 featureNum;
......@@ -259,7 +251,7 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
qInfo()<<QString("搜索失败: %1").arg(ret);
return ;
}
qDebug()<<QString("搜索置信度: %1").arg(confidence);
qDebug()<<QString("匹配到的tag: %1").arg(searchIdentity.tag);
qDebug()<<QString("匹配到的customId: %1").arg(searchIdentity.customId);
......@@ -274,7 +266,7 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
newface.height=multipleFaceData.rects[rect].height;
faces.push_back(newface);
}
rect++;
}
ret = HF_ReleaseImageStream(imageSteamHandle);
......
#ifndef FACERECONITIONHANDLE_H
#define FACERECONITIONHANDLE_H
#include "BaseAlgorithm.h"
#include "hyperface.h"
#include "herror.h"
......@@ -10,8 +11,8 @@
#include<QCoreApplication>
class FaceReconitionHandle
{
class FaceReconitionHandle : public BaseAlgorithm {
private:
HContextHandle ctxHandle=nullptr;
......@@ -19,10 +20,6 @@ private:
std::vector<int32_t>customIds;
std::atomic<qint64> thread_time;
std::atomic<bool> isRunning{false};
std::atomic<bool>isImageChanged{false};
public:
FaceReconitionHandle();
......@@ -31,12 +28,6 @@ public:
cv::Mat loadImage(const QString &path);
cv::Mat loadImageFromByteStream(const QString& filePath);
qint64 getThreadTime() const;
bool getIsRunning() const;
void setIsRunning(bool running);
bool getImageChanged()const;
void setImageChanged(bool imageChanged);
......
......@@ -453,6 +453,116 @@ vides_data::response*HttpService::httpFindGb28181Config(QString &serialNumber){
}
return resp;
}
/**
 * Fetches the cloud device configuration for @p serialNumber and fills @p config.
 *
 * @param serialNumber  device SN, sent as the "sn" query parameter.
 * @param config        out-parameter populated from the JSON "data" object.
 * @return heap-allocated response (caller owns it); resp->code/msg come from the
 *         server body on HTTP success, otherwise code=2 and msg is the transport error.
 */
vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,vides_data::responseConfig &config){
    // Build the full URL in a local copy. The previous code appended the path to
    // the member httpUrl, corrupting the base URL for every later request made
    // through this instance (each call would stack another "/api/..." suffix).
    QString requestUrl = httpUrl;
    requestUrl.append("/api/v1.0/device/config");
    vides_data::response *resp=new vides_data::response();
    QUrlQuery query;
    query.addQueryItem("sn",serialNumber);
    QNetworkRequest request;
    QUrl url(requestUrl);
    url.setQuery(query);
    request.setUrl(url);
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);  // serialize use of the shared http client
    if(m_httpClient.get(request)){
        QByteArray && byte=m_httpClient.text().toUtf8();
        QJsonDocument docujson= QJsonDocument::fromJson(byte.data());
        QJsonObject maps= docujson.object();
        QVariantMap map =std::move(maps.toVariantMap());
        resp->code=map["code"].toInt();
        QJsonObject dataObj = maps["data"].toObject();
        // Parse MainFormat (primary stream)
        QJsonObject mainFormatObj = dataObj["MainFormat"].toObject();
        config.mainFormat.AudioEnable = mainFormatObj["AudioEnable"].toBool();
        config.mainFormat.BitRate = mainFormatObj["BitRate"].toInt();
        config.mainFormat.BitRateControl = mainFormatObj["BitRateControl"].toString();
        config.mainFormat.Compression = mainFormatObj["Compression"].toString();
        config.mainFormat.FPS = mainFormatObj["FPS"].toInt();
        config.mainFormat.GOP = mainFormatObj["GOP"].toInt();
        config.mainFormat.Quality = mainFormatObj["Quality"].toInt();
        config.mainFormat.Resolution = mainFormatObj["Resolution"].toString();
        config.mainFormat.VirtualGOP = mainFormatObj["VirtualGOP"].toInt();
        config.mainFormat.VideoEnable = mainFormatObj["VideoEnable"].toBool();
        // toVariant().toULongLong(): QJsonValue::toInt would truncate 64-bit timestamps
        config.mainFormat.updateAt = mainFormatObj["updateAt"].toVariant().toULongLong();
        // Parse ExtraFormat (secondary stream)
        QJsonObject extraFormatObj = dataObj["ExtraFormat"].toObject();
        config.extraFormat.AudioEnable = extraFormatObj["AudioEnable"].toBool();
        config.extraFormat.BitRate = extraFormatObj["BitRate"].toInt();
        config.extraFormat.BitRateControl = extraFormatObj["BitRateControl"].toString();
        config.extraFormat.Compression = extraFormatObj["Compression"].toString();
        config.extraFormat.FPS = extraFormatObj["FPS"].toInt();
        config.extraFormat.GOP = extraFormatObj["GOP"].toInt();
        config.extraFormat.Quality = extraFormatObj["Quality"].toInt();
        config.extraFormat.Resolution = extraFormatObj["Resolution"].toString();
        config.extraFormat.VirtualGOP = extraFormatObj["VirtualGOP"].toInt();
        config.extraFormat.VideoEnable = extraFormatObj["VideoEnable"].toBool();
        config.extraFormat.updateAt = extraFormatObj["updateAt"].toVariant().toULongLong();
        // Parse timerSettings
        QJsonObject timerSettingsObj = dataObj["timerSettings"].toObject();
        config.timerSettings.deleteLogFileTimer = timerSettingsObj["deleteLogFileTimer"].toInt();
        config.timerSettings.devicePermissionSynTimer = timerSettingsObj["devicePermissionSynTimer"].toInt();
        config.timerSettings.updateAt = timerSettingsObj["updateAt"].toVariant().toULongLong();
        // Parse faceConfig
        QJsonObject faceConfigObj = dataObj["faceConfig"].toObject();
        config.faceConfig.isOn = faceConfigObj["isOn"].toBool();
        config.faceConfig.faceNumbers = faceConfigObj["faceNumbers"].toInt();
        config.faceConfig.faceFrequency = faceConfigObj["faceFrequency"].toInt();
        config.faceConfig.confidence = faceConfigObj["confidence"].toVariant().toFloat();
        config.faceConfig.updateAt = faceConfigObj["updateAt"].toVariant().toULongLong();
        config.faceConfig.faceLen=faceConfigObj["faceLen"].toInt();
        // Parse licensePlateConfig
        QJsonObject licensePlateConfigObj = dataObj["licensePlateConfig"].toObject();
        config.licensePlateConfig.isOn = licensePlateConfigObj["isOn"].toBool();
        config.licensePlateConfig.carConfidence = licensePlateConfigObj["carConfidence"].toVariant().toFloat();
        config.licensePlateConfig.carConfidenceMax = licensePlateConfigObj["carConfidenceMax"].toVariant().toFloat();
        config.licensePlateConfig.carConfidenceMin = licensePlateConfigObj["carConfidenceMin"].toVariant().toFloat();
        config.licensePlateConfig.licensePlateLen=licensePlateConfigObj["licensePlateLen"].toInt();
        config.licensePlateConfig.updateAt = licensePlateConfigObj["updateAt"].toVariant().toULongLong();
        // Parse uniformConfig
        QJsonObject uniformConfigObj = dataObj["uniformConfig"].toObject();
        config.uniformConfig.isOn = uniformConfigObj["isOn"].toBool();
        config.uniformConfig.uniformColor = uniformConfigObj["uniformColor"].toInt();
        config.uniformConfig.humanDetectionLen = uniformConfigObj["humanDetectionLen"].toInt();
        config.uniformConfig.updateAt = uniformConfigObj["updateAt"].toVariant().toULongLong();
        config.uniformConfig.carShapeConfidence = uniformConfigObj["carShapeConfidence"].toVariant().toFloat();
        // Parse camera settings
        QJsonObject devicesConfigObj = dataObj["camera"].toObject();
        config.camera.password = devicesConfigObj["password"].toString();
        config.camera.username = devicesConfigObj["username"].toString();
        config.camera.updateAt = devicesConfigObj["updateAt"].toVariant().toULongLong();
        config.camera.devSnapSynTimer = devicesConfigObj["devSnapSynTimer"].toInt();
        config.camera.imageSave = devicesConfigObj["imageSave"].toInt();
        config.camera.heightReference = devicesConfigObj["heightReference"].toVariant().toFloat();
        // Parse mqttConfig
        QJsonObject mqttConfigObj = dataObj["mqttConfig"].toObject();
        config.mqttConfig.address=mqttConfigObj["address"].toString();
        config.mqttConfig.clientId=mqttConfigObj["clientId"].toString();
        config.mqttConfig.qos=mqttConfigObj["qos"].toInt();
        config.mqttConfig.timeout = mqttConfigObj["timeout"].toVariant().toULongLong();
        config.mqttConfig.topic=mqttConfigObj["topic"].toString();
        config.mqttConfig.username=mqttConfigObj["username"].toString();
        config.mqttConfig.password=mqttConfigObj["password"].toString();
        resp->msg=map["message"].toString();
    }else{
        qDebug()<<m_httpClient.errorCode();
        resp->code=2;  // local convention: transport-level failure
        resp->msg=m_httpClient.errorString();
    }
    return resp;
}
vides_data::response*HttpService::httpFindStream(QString &serialNumber){
......
......@@ -50,6 +50,9 @@ public:
QString & bucketName,QString &securityToken);
void setHttpUrl(const QString & httpUrl);
vides_data::response *httpDeviceConfig(const QString &serialNumber,vides_data::responseConfig &config);
static vides_data::responseStsCredentials stsCredentials;
~HttpService();
......
......@@ -4,7 +4,7 @@
#include <QDateTime>
HumanDetection::HumanDetection(const QString &modelPaths,
float carShapeConfidence) : heightReference(250.0f),thread_time(0) {
float carShapeConfidence) : heightReference(250.0f){
QByteArray && by_mpath=modelPaths.toUtf8();
char* m_path=by_mpath.data();
detector = TCV_CreateHumanDetector(m_path,1);
......@@ -14,8 +14,11 @@ HumanDetection::HumanDetection(const QString &modelPaths,
}
HumanDetection::~HumanDetection(){
static int i=0;
if(detector!=nullptr){
TCV_ReleaseHumanDetector(detector);
qInfo()<<"工服析构"<<++i;
detector=nullptr;
}
}
......@@ -70,35 +73,21 @@ void HumanDetection::draw_human_on_image(const cv::Mat& image, const TCV_ObjectL
}
}
void HumanDetection::setHuManParameter(float &height_reference,int &uniformColor){
this->heightReference=height_reference;
void HumanDetection::setHuManParameter(int &uniformColor){
this->uniformColor=uniformColor;
}
qint64 HumanDetection::getThreadTime()const{
return thread_time.load(std::memory_order_acquire);
}
bool HumanDetection::getIsRunning()const{
return isRunning.load(std::memory_order_acquire);
}
void HumanDetection::setIsRunning(bool running) {
this->isRunning.store(running, std::memory_order_release);
}
//0 人形 1 车形 2 工服
int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int>&reMap, std::vector<vides_data::ParkingArea> &currentPlate) {
int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int>&reMap, float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate) {
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
TCV_CameraStream *stream = TCV_CreateCameraStream();
ScopeSemaphoreExit streamGuard([this, stream]() {
isRunning.store(false, std::memory_order_release);
// 释放相机流
TCV_ReleaseCameraStream(stream);
isRunning.store(false, std::memory_order_release);
});
TCV_CameraStreamSetData(stream, source.data, source.cols, source.rows);
......@@ -145,6 +134,8 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
}
reMap[0x02] = count_no_uniform; // 未穿工服的行人数量
reMap[0x00] = count_all; // 所有满足条件的行人数量
qInfo()<<"count_all==>"<<count_all;
qInfo()<<"count_no_uniform==>"<<count_no_uniform;
num = (res == 0x00) ? count_all : count_no_uniform;
}
......@@ -166,11 +157,10 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
area.bottomRightCornerX=car.x2;
area.bottomRightCornerY=car.y2;
currentPlate.push_back(area);
qDebug() << "score 检测到的汽车数量匹配度:" << car.score;
}
qDebug() << "findHuManCar 检测到的汽车数量:" << num;
qInfo() << "findHuManCar 检测到的汽车数量:" << num;
} else {
qDebug() << "参数错误";
qInfo() << "参数错误";
}
return num;
}
......@@ -3,35 +3,26 @@
#include "VidesData.h"
#include "so_human_sdk.h"
#include "ScopeSemaphoreExit.h"
#include "BaseAlgorithm.h"
#include <signal.h>
#include <QDateTime>
#include <opencv2/opencv.hpp>
#include <QDebug>
#include <atomic>
#include<QThread>
class HumanDetection:public QObject {
Q_OBJECT
class HumanDetection:public BaseAlgorithm {
public:
HumanDetection(const QString &modelPaths,
float carShapeConfidence);
~HumanDetection();
int findHuManCar(const cv::Mat &source,int res,std::map<int,int>&reMap,
std::vector<vides_data::ParkingArea> &currentPlate);
float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate);
void setHuManParameter(int &uniformColor);
void setHuManParameter(float &height_reference,int &uniformColor);
void draw_human_on_image(const cv::Mat& image, const TCV_ObjectLocation* boxes, int size);
qint64 getThreadTime() const;
bool getIsRunning() const;
void setIsRunning(bool running);
private:
//高度基准
float heightReference;
......@@ -40,11 +31,6 @@ private:
TCV_HumanDetector *detector;
std::atomic<qint64> thread_time;
std::atomic<bool> isRunning{false};
};
#endif // HUMANDETECTION_H
......@@ -24,7 +24,10 @@ LicensePlateRecognition::LicensePlateRecognition(){
}
// Releases the HyperLPR context; the static counter logs how many
// recognizer instances have been destroyed (debug aid).
// NOTE(review): ctx is neither null-checked nor reset here — confirm
// HLPR_ReleaseContext tolerates a null/releases-once contract.
LicensePlateRecognition::~LicensePlateRecognition(){
    static int i=0;
    HLPR_ReleaseContext(ctx);
    qInfo()<<"车牌析构"<<++i;  // "plate destructed" + running count
}
void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const QString &modelPaths,QString & lpNumber){
......@@ -112,18 +115,6 @@ void LicensePlateRecognition::filterLicensePlateConfidenceMax(vides_data::reques
}
}
}
// Returns the last-run timestamp with acquire ordering, pairing with the
// worker thread's release store.
qint64 LicensePlateRecognition::getThreadTime()const{
    const qint64 lastRun = thread_time.load(std::memory_order_acquire);
    return lastRun;
}
bool LicensePlateRecognition::getIsRunning()const{
return isRunning.load(std::memory_order_acquire);
}
void LicensePlateRecognition::setIsRunning(bool running) {
this->isRunning.store(running, std::memory_order_release);
}
void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime) {
......
......@@ -3,6 +3,7 @@
#include "hyper_lpr_sdk.h"
#include "LogHandle.h"
#include "VidesData.h"
#include "BaseAlgorithm.h"
#include <QString>
#include <opencv2/opencv.hpp>
#include <QTextStream>
......@@ -14,7 +15,8 @@
const std::vector<std::string> types =
{"蓝牌", "黄牌单层", "白牌单层", "绿牌新能源", "黑牌港澳",
"香港单层", "香港双层", "澳门单层", "澳门双层", "黄牌双层"};
class LicensePlateRecognition{
class LicensePlateRecognition: public BaseAlgorithm {
public:
//识别车牌号
......@@ -33,22 +35,10 @@ public:
~LicensePlateRecognition();
qint64 getThreadTime() const;
bool getIsRunning() const;
void setIsRunning(bool running);
private:
P_HLPR_Context ctx=nullptr ;
std::atomic<qint64> thread_time;
std::atomic<bool> isRunning{false};
};
......
#include "MqttSubscriber.h"
// Singleton storage: set by getInstance(), cleared by the destructor.
MqttSubscriber* MqttSubscriber::instance = nullptr;
// Returns the process-wide subscriber, creating it on first call.
// `config` and `parent` are only honored by the call that creates the instance.
// NOTE(review): the lazy init is unsynchronized — confirm getInstance is only
// called from one thread before the instance exists.
MqttSubscriber* MqttSubscriber::getInstance(vides_data::MqttConfig& config, QObject* parent) {
    if (!instance) {
        instance = new MqttSubscriber(config, parent);
    }
    return instance;
}
// Creates the Paho async client from the MQTT config and routes the C-style
// connection-lost / message-arrived callbacks back to this object through
// the `context` pointer.
MqttSubscriber::MqttSubscriber(vides_data::MqttConfig& config, QObject* parent)
    : QObject(parent), config(config) {
    // Paho wants narrow C strings; the QByteArrays stay alive across both calls below.
    QByteArray bAddress = config.address.toUtf8();
    char* cAddress=bAddress.data();
    QByteArray bClientId = config.clientId.toUtf8();
    char* cClientId=bClientId.data();
    // NOTE(review): return codes of MQTTAsync_create/MQTTAsync_setCallbacks are
    // ignored — consider checking them.
    MQTTAsync_create(&client,cAddress,cClientId, MQTTCLIENT_PERSISTENCE_NONE, nullptr);
    MQTTAsync_setCallbacks(client, this, [](void* context, char* cause) {
        static_cast<MqttSubscriber*>(context)->connectionLost(cause);
    }, [](void* context, char* topicName, int topicLen, MQTTAsync_message* m) {
        return static_cast<MqttSubscriber*>(context)->messageArrived(topicName, topicLen, m);
    }, nullptr);
}
// Destroys the client handle and clears the singleton pointer so a later
// getInstance() can rebuild it.
// NOTE(review): no MQTTAsync_disconnect is issued before destroy — confirm an
// abrupt teardown is acceptable for the broker session.
MqttSubscriber::~MqttSubscriber() {
    MQTTAsync_destroy(&client);
    instance = nullptr;
}
// Starts the asynchronous connect; onConnect / onConnectFailure receive the
// outcome and onConnect then performs the subscribe.
void MqttSubscriber::start() {
    MQTTAsync_connectOptions conn_opts = MQTTAsync_connectOptions_initializer;
    conn_opts.keepAliveInterval = 20;  // seconds
    conn_opts.cleansession = 1;        // discard any previous broker session state
    // Credentials are handed over as C strings; assumed copied inside
    // MQTTAsync_connect (see Paho docs) — the QByteArrays outlive the call either way.
    QByteArray bUsername = config.username.toUtf8();
    char* cUsername=bUsername.data();
    QByteArray bPassword = config.password.toUtf8();
    char* cPassword=bPassword.data();
    conn_opts.username = cUsername;
    conn_opts.password = cPassword;
    conn_opts.onSuccess = [](void* context, MQTTAsync_successData* response) {
        static_cast<MqttSubscriber*>(context)->onConnect(response);
    };
    conn_opts.onFailure = [](void* context, MQTTAsync_failureData* response) {
        static_cast<MqttSubscriber*>(context)->onConnectFailure(response);
    };
    conn_opts.context = this;  // lets the static lambdas recover `this`
    int rc;
    if ((rc = MQTTAsync_connect(client, &conn_opts)) != MQTTASYNC_SUCCESS) {
        qInfo() << "启动连接失败,返回编码" << rc;
    }
}
// Connect succeeded: subscribe to the configured topic at the configured QoS;
// onSubscribe / onSubscribeFailure receive the result.
void MqttSubscriber::onConnect(MQTTAsync_successData* response) {
    MQTTAsync_responseOptions opts = MQTTAsync_responseOptions_initializer;
    opts.onSuccess = [](void* context, MQTTAsync_successData* response) {
        static_cast<MqttSubscriber*>(context)->onSubscribe(response);
    };
    opts.onFailure = [](void* context, MQTTAsync_failureData* response) {
        static_cast<MqttSubscriber*>(context)->onSubscribeFailure(response);
    };
    opts.context = this;
    // The QByteArray must outlive the MQTTAsync_subscribe call below.
    QByteArray bTopic = config.topic.toUtf8();
    char* cTopic=bTopic.data();
    int rc;
    if ((rc = MQTTAsync_subscribe(client, cTopic, config.qos, &opts)) != MQTTASYNC_SUCCESS) {
        qInfo() << "启动订阅失败,返回编码" << rc<<response->token;
    }
}
// Connect attempt failed; log the Paho error code (-1 when no detail is supplied).
void MqttSubscriber::onConnectFailure(MQTTAsync_failureData* response) {
    const int rc = response ? response->code : -1;
    qInfo() << "连接失败, rc" << rc;
}
// Subscribe succeeded; log the delivery token.
// Guard against a null response before dereferencing — the failure handlers
// already null-check, and this handler previously did not.
void MqttSubscriber::onSubscribe(MQTTAsync_successData* response) {
    qInfo() << "订阅成功" << (response ? response->token : -1);
}
// Subscribe failed; log the Paho error code (-1 when no detail is supplied).
void MqttSubscriber::onSubscribeFailure(MQTTAsync_failureData* response) {
    const int rc = response ? response->code : -1;
    qInfo() << "订阅失败, rc" << rc;
}
// Broker connection dropped; `cause` may be null when Paho has no reason string.
void MqttSubscriber::connectionLost(char* cause) {
    qInfo() << "连接丢失";
    if (cause != nullptr) {
        qInfo() << "Cause:" << cause;
    }
}
// Inbound-message callback: logs topic and payload, then frees the Paho
// message and topic (required by the callback contract). Returns 1 to tell
// Paho the message was handled.
// Fix: per the Paho async API, topicLen == 0 means topicName is NUL-terminated;
// otherwise the topic may contain embedded NULs and topicLen must be used —
// the old QString(topicName) would truncate or over-read such topics.
int MqttSubscriber::messageArrived(char* topicName, int topicLen, MQTTAsync_message* m) {
    QString topic = (topicLen == 0)
            ? QString::fromUtf8(topicName)
            : QString::fromUtf8(topicName, topicLen);
    QString payload = QString::fromUtf8(reinterpret_cast<const char*>(m->payload), m->payloadlen);
    qInfo() << "Message arrived";
    qInfo() << "Topic:" << topic;
    qInfo() << "Payload:" << payload;
    qInfo()<<"topicLen"<<topicLen;
    MQTTAsync_freeMessage(&m);
    MQTTAsync_free(topicName);
    return 1;
}
// Publishes a confirmation for `messageId` on the fixed
// "confirmation/subscription" topic at the configured QoS;
// onPublishSuccess / onPublishFailure receive the outcome.
void MqttSubscriber::sendSubscriptionConfirmation(const std::string& messageId) {
    std::string confirmationTopic = "confirmation/subscription";
    std::string confirmationMessage = "Subscription confirmed with message ID: " + messageId;
    MQTTAsync_message pubmsg = MQTTAsync_message_initializer;
    // Payload only needs to outlive the MQTTAsync_sendMessage call
    // (assumed copied by Paho — see its docs).
    pubmsg.payload = const_cast<char*>(confirmationMessage.c_str());
    pubmsg.payloadlen = confirmationMessage.length();
    pubmsg.qos = config.qos;
    pubmsg.retained = 0;  // do not retain the confirmation on the broker
    MQTTAsync_responseOptions opts = MQTTAsync_responseOptions_initializer;
    opts.onSuccess = [](void* context, MQTTAsync_successData* response) {
        static_cast<MqttSubscriber*>(context)->onPublishSuccess(response);
    };
    opts.onFailure = [](void* context, MQTTAsync_failureData* response) {
        static_cast<MqttSubscriber*>(context)->onPublishFailure(response);
    };
    opts.context = this;
    int rc;
    if ((rc = MQTTAsync_sendMessage(client, confirmationTopic.c_str(), &pubmsg, &opts)) != MQTTASYNC_SUCCESS) {
        qInfo() << "发送消息失败,返回编码" << rc;
    }
}
// Publish succeeded; log the delivery token.
// Guard against a null response before dereferencing, matching the
// null-checks the failure handlers already perform.
void MqttSubscriber::onPublishSuccess(MQTTAsync_successData* response) {
    qInfo() << "消息已成功发布" << (response ? response->token : -1);
}
// Publish was rejected or timed out; -1 means no failure detail was provided.
void MqttSubscriber::onPublishFailure(MQTTAsync_failureData* response) {
    const int rc = response ? response->code : -1;
    qInfo() << "消息发布失败, rc" << rc;
}
#ifndef MQTTSUBSCRIBER_H
#define MQTTSUBSCRIBER_H
#include <MQTTClient.h>
#include <MQTTAsync.h>
#include <QObject>
#include "VidesData.h"
// Singleton MQTT subscriber built on the Paho MQTT C asynchronous client.
// Obtain via getInstance(); copying is disabled.
class MqttSubscriber : public QObject
{
    Q_OBJECT
public:
    // Returns the process-wide instance, creating it on the first call
    // (later calls ignore the arguments).
    static MqttSubscriber* getInstance(vides_data::MqttConfig& config, QObject* parent = nullptr);
    ~MqttSubscriber();
    // Kicks off the async connect; on success the configured topic is subscribed.
    void start();
private:
    MqttSubscriber(vides_data::MqttConfig& config, QObject* parent = nullptr);
    MqttSubscriber(const MqttSubscriber&) = delete;
    MqttSubscriber& operator=(const MqttSubscriber&) = delete;

    MQTTAsync client;               // Paho async client handle
    vides_data::MqttConfig config;  // broker address, credentials, topic, QoS

    // Paho callback handlers (invoked via static lambdas with `this` as context).
    void onConnect(MQTTAsync_successData* response);
    void onConnectFailure(MQTTAsync_failureData* response);
    void onSubscribe(MQTTAsync_successData* response);
    void onSubscribeFailure(MQTTAsync_failureData* response);
    void connectionLost(char* cause);
    int messageArrived(char* topicName, int topicLen, MQTTAsync_message* m);
    void onPublishSuccess(MQTTAsync_successData* response);
    void onPublishFailure(MQTTAsync_failureData* response);
    // Publishes an acknowledgement on "confirmation/subscription".
    void sendSubscriptionConfirmation(const std::string& messageId);

    // NOTE(review): lazy singleton init is unsynchronized — confirm single-threaded creation.
    static MqttSubscriber* instance;
};
#endif // MQTTSUBSCRIBER_H
......@@ -31,7 +31,7 @@ struct response
};
struct requestCameraInfo{
QString ip_addr;
QString firmware_version;
QString sSn;
......@@ -204,6 +204,106 @@ struct responseRecognitionData
int recognitionType;
QString sn;
};
// Primary-stream encoder settings, parsed from the "MainFormat" object of
// the /api/v1.0/device/config response.
struct MainFormat {
    bool AudioEnable;         // audio track enabled
    int BitRate;
    QString BitRateControl;   // e.g. CBR/VBR — TODO confirm server values
    QString Compression;      // codec name — TODO confirm server values
    int FPS;
    int GOP;
    int Quality;
    QString Resolution;
    int VirtualGOP;
    bool VideoEnable;         // video track enabled
    quint64 updateAt;         // server-side last-update timestamp
};
// Secondary-stream encoder settings, parsed from the "ExtraFormat" object of
// the device-config response (same field layout as MainFormat).
struct ExtraFormat {
    bool AudioEnable;         // audio track enabled
    int BitRate;
    QString BitRateControl;   // e.g. CBR/VBR — TODO confirm server values
    QString Compression;      // codec name — TODO confirm server values
    int FPS;
    int GOP;
    int Quality;
    QString Resolution;
    int VirtualGOP;
    bool VideoEnable;         // video track enabled
    quint64 updateAt;         // server-side last-update timestamp
};
// Periodic-job intervals from the "timerSettings" object of the device-config response.
struct TimerSettings {
    int deleteLogFileTimer;        // log-file cleanup interval — units: presumably seconds, verify
    int devicePermissionSynTimer;  // permission re-sync interval — units: presumably seconds, verify
    quint64 updateAt;              // server-side last-update timestamp
};
// Camera access and snapshot settings from the "camera" object of the
// device-config response.
struct Camera {
    int devSnapSynTimer;     // snapshot-sync timer interval
    int imageSave;
    QString password;        // camera login password
    QString username;        // camera login username
    float heightReference;   // passed to CameraHandle — presumably a height threshold, verify
    quint64 updateAt;        // server-side last-update timestamp
};
// Face-recognition settings from the "faceConfig" object of the device-config response.
struct FaceConfig {
    bool isOn;              // feature switch
    int faceNumbers;
    uint64 faceFrequency;   // fed to initSdkRealTimeDevSnapSyn as face_frequency
    float confidence;       // recognition threshold
    int faceLen;            // worker count — sizes the face semaphore in AlgorithmTaskManage
    quint64 updateAt;       // server-side last-update timestamp
};
// License-plate recognition settings from the "licensePlateConfig" object of
// the device-config response.
struct LicensePlateConfig {
    bool isOn;               // feature switch
    float carConfidence;     // base confidence threshold
    float carConfidenceMax;  // upper filter bound (see setCarConfidenceMaxAndMin)
    float carConfidenceMin;  // lower filter bound
    int licensePlateLen;     // worker count — sizes the plate semaphore in AlgorithmTaskManage
    quint64 updateAt;        // server-side last-update timestamp
};
// Human/uniform detection settings from the "uniformConfig" object of the
// device-config response.
struct UniformConfig {
    bool isOn;                 // feature switch
    int uniformColor;          // fed to HumanDetection::setHuManParameter
    int humanDetectionLen;     // worker count — sizes the human-detection semaphore
    float carShapeConfidence;  // passed to the HumanDetection constructor
    quint64 updateAt;          // server-side last-update timestamp
};
// MQTT broker settings from the "mqttConfig" object of the device-config
// response; consumed by MqttSubscriber.
struct MqttConfig {
    QString address;   // broker address/URI
    QString clientId;  // client identifier passed to MQTTAsync_create
    int qos;           // QoS for subscribe and publish
    quint64 timeout;
    QString topic;     // topic subscribed to on connect
    QString username;  // broker credentials
    QString password;
};
// Aggregate of every section returned by /api/v1.0/device/config
// (filled by HttpService::httpDeviceConfig).
struct responseConfig {
    MainFormat mainFormat;                 // "MainFormat"
    ExtraFormat extraFormat;               // "ExtraFormat"
    TimerSettings timerSettings;           // "timerSettings"
    FaceConfig faceConfig;                 // "faceConfig"
    LicensePlateConfig licensePlateConfig; // "licensePlateConfig"
    UniformConfig uniformConfig;           // "uniformConfig"
    Camera camera;                         // "camera"
    MqttConfig mqttConfig;                 // "mqttConfig"
};
// Per-device entry carried inside MQTT payloads (see responseMqttData).
struct Devices{
    QString id;     // device identifier — presumably the SN, verify against publisher
    QString state;
};
// Decoded MQTT message body: a type discriminator plus the devices it refers to.
struct responseMqttData{
    uint8_t msg_type;           // message-type discriminator — value meanings TODO: document
    std::list<Devices>devices;
};
inline bool isVirtualMachine()
{
QString dmiPath;
......@@ -269,14 +369,25 @@ inline bool pingAddress(const QString &address) {
QProcess process;
QString program = "ping";
QStringList arguments;
arguments << "-c" << "1" << address; // -c 1 表示发送一个 Ping 包
#ifdef Q_OS_WIN
arguments << "-n" << "1" << address;
#else
arguments << "-c" << "1" << address;
#endif
process.start(program, arguments);
process.waitForFinished();
if (!process.waitForStarted()) {
return false;
}
if (!process.waitForFinished(1000)) {
return false;
}
QString output(process.readAllStandardOutput());
// 此处可以使用更复杂的逻辑来解析 Ping 输出
return output.contains("1 packets transmitted, 1 received");
}
inline int GetCpuIdByAsm_arm(char* cpu_id)
......@@ -287,14 +398,14 @@ inline int GetCpuIdByAsm_arm(char* cpu_id)
qDebug()<<"failed to open cpuinfo";
return -1;
}
char cpuSerial[100] = {0};
while(!feof(fp))
{
memset(cpuSerial, 0, sizeof(cpuSerial));
fgets(cpuSerial, sizeof(cpuSerial) - 1, fp); // leave out \n
char* pch = strstr(cpuSerial,"Serial");
if (pch)
{
......@@ -302,7 +413,7 @@ inline int GetCpuIdByAsm_arm(char* cpu_id)
if (pch2)
{
memmove(cpu_id, pch2 + 2, strlen(cpuSerial));
break;
}
else
......@@ -313,7 +424,7 @@ inline int GetCpuIdByAsm_arm(char* cpu_id)
}
}
fclose(fp);
return 0;
}
......
......@@ -12,7 +12,7 @@ TEMPLATE = app
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
DEFINES += APP_VERSION=\\\"1.1.0\\\"
DEFINES += APP_VERSION=\\\"1.3.0\\\"
QMAKE_LIBDIR += /usr/local/lib
......@@ -23,6 +23,7 @@ INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK
INCLUDEPATH+=/usr/local/include/human
INCLUDEPATH+=/usr/local/include/CImg
INCLUDEPATH+=/usr/local/include/mqtt
#unix:contains(QMAKE_HOST.arch, x86_64) {
# QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
......@@ -76,6 +77,7 @@ LIBS += -lopencv_core \
-lhyperlpr3 \
-lopencv_objdetect \
-lsohuman \
-lpaho-mqtt3a \
# -lssl \
# -lcrypto \ sudo apt-get install libjpeg-dev libpng-dev
-lc \
......@@ -97,7 +99,9 @@ SOURCES += \
HumanDetection.cpp \
ScopeSemaphoreExit.cpp \
FaceReconitionHandle.cpp \
AlgorithmTaskManage.cpp
AlgorithmTaskManage.cpp \
BaseAlgorithm.cpp \
MqttSubscriber.cpp
HEADERS += \
Common.h \
......@@ -115,7 +119,9 @@ HEADERS += \
HumanDetection.h \
ScopeSemaphoreExit.h \
FaceReconitionHandle.h \
AlgorithmTaskManage.h
AlgorithmTaskManage.h \
BaseAlgorithm.h \
MqttSubscriber.h
#FORMS += \
# mainwindow.ui
......
......@@ -7,8 +7,8 @@ MainWindow::MainWindow()
{
sp_this=this;
LogHandler::Get().installMessageHandler();
QString inifile=QCoreApplication::applicationDirPath()+"/gameras.ini";
......@@ -16,41 +16,11 @@ MainWindow::MainWindow()
qSetting->setIniCodec(QTextCodec::codecForName("UTF-8"));
modelPaths=qSetting->value("licensePlateRecognition/model_paths").toString();
initCommon();
QThreadPool* threadPool = QThreadPool::globalInstance();
threadPool->setMaxThreadCount(12);
deleteLogFileTimer =new QTimer(this);
connect(deleteLogFileTimer, &QTimer::timeout, this, &MainWindow::deleteLogFile);
int deleteLogfileTimer=qSetting->value("timer/delete_logfile_timer").toInt();
deleteLogFileTimer->start(deleteLogfileTimer);
deleteFrameFileTimer =new QTimer(this);
int deMkvflieTimer=qSetting->value("timer/delete_mkvflie_timer").toInt();
connect(deleteFrameFileTimer,&QTimer::timeout,this,&MainWindow::deleteMkvFileTimer);
deleteFrameFileTimer->start(deMkvflieTimer);
QThreadPool* threadPool = QThreadPool::globalInstance();
initFaceFaceRecognition();
threadPool->setMaxThreadCount(12);
float heightReference=qSetting->value("devices/height_reference").toFloat();
int uniformColor=qSetting->value("devices/uniformColor").toInt();
int humanDetectionLen=qSetting->value("devices/humanDetectionLen",3).toInt();
int licensePlateLen=qSetting->value("devices/licensePlateLen",3).toInt();
int faceLen=qSetting->value("devices/faceLen",3).toInt();
float carShapeConfidence=qSetting->value("devices/carShapeConfidence").toFloat();
Common & instace= Common::getInstance();
instace.setTaskManage(humanDetectionLen,licensePlateLen,faceLen);
float carConfidence=qSetting->value("devices/carConfidence").toFloat();
AlgorithmTaskManage &algorithmTaskManage= AlgorithmTaskManage::getInstance(humanDetectionLen,licensePlateLen,faceLen);
algorithmTaskManage.initHumanDetectionManage(modelPaths,carShapeConfidence,heightReference,uniformColor);
algorithmTaskManage.initLicensePlateManage(modelPaths,carConfidence);
QString httpurl;
QString profile=qSetting->value("cloudservice/profile","test").toString();
if(strcmp(profile.toUtf8().data(),vides_data::PROFLIE_TEST)==0 ){
......@@ -60,9 +30,53 @@ MainWindow::MainWindow()
}else{
httpurl=qSetting->value("cloudservice/pro_http").toString();
}
Common & instace= Common::getInstance();
QString serialNumber;
findLocalSerialNumber(serialNumber);
qInfo()<<"serialNumber==:"<<serialNumber;
HttpService httpService(httpurl);
vides_data::response *res= httpService.httpDeviceConfig(serialNumber,config);
if(res->code!=0){
qInfo()<<"请求远程商户配置失败";
instace.deleteObj(res);
return ;
}
instace.deleteObj(res);
initCommon();
deleteLogFileTimer =new QTimer(this);
connect(deleteLogFileTimer, &QTimer::timeout, this, &MainWindow::deleteLogFile);
int deleteLogfileTimer=config.timerSettings.deleteLogFileTimer;
deleteLogFileTimer->start(deleteLogfileTimer);
initFaceFaceRecognition();
int uniformColor=config.uniformConfig.uniformColor;
int humanDetectionLen=config.uniformConfig.humanDetectionLen;
int licensePlateLen=config.licensePlateConfig.licensePlateLen;
int faceLen=config.faceConfig.faceLen;
float carShapeConfidence=config.uniformConfig.carShapeConfidence;
float carConfidence=config.licensePlateConfig.carConfidence;
AlgorithmTaskManage &algorithmTaskManage= AlgorithmTaskManage::getInstance();
algorithmTaskManage.initialize(humanDetectionLen,licensePlateLen,faceLen,true,0x00);
algorithmTaskManage.initHumanDetectionManage(modelPaths,carShapeConfidence,uniformColor);
algorithmTaskManage.initLicensePlateManage(modelPaths,carConfidence);
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
QString configPath = qSetting->value("devices/sz_config_path").toString();
QString tempPath = qSetting->value("devices/sz_temp_path").toString();
int sdk_handle= mediaFaceImage->SdkInit(configPath,tempPath);
qDebug()<<"句柄为:"<<sdk_handle;
if(sdk_handle<0){
......@@ -73,25 +87,25 @@ MainWindow::MainWindow()
dePermissionSynTimer=new QTimer(this);
int dePermissionTimer=qSetting->value("timer/device_permission_syn_timer").toInt();
int dePermissionTimer=config.timerSettings.devicePermissionSynTimer;
connect(dePermissionSynTimer, &QTimer::timeout, this, [this, httpurl](){
this->startCamera(httpurl);
},Qt::QueuedConnection);
this->startCamera(httpurl);
float confidence=qSetting->value("devices/confidence").toFloat();
int faceNumbers=qSetting->value("devices/faceNumbers").toInt();
float confidence=config.faceConfig.confidence;
int faceNumbers=config.faceConfig.faceNumbers;
algorithmTaskManage.initFaceReconitionHandle(localImageMap,faceNumbers,confidence);
// 设置定时器间隔
dePermissionSynTimer->setInterval(dePermissionTimer);
// 启动定时器
dePermissionSynTimer->start();
//dePermissionSynTimer->start(dePermissionTimer);
//vides_data::scanWiFiNetworks();
connect(&server, &QTcpServer::newConnection, this, &MainWindow::handleMatNewConnection);
......@@ -103,6 +117,61 @@ MainWindow::MainWindow()
} else {
qDebug() << "Server started, listening on port 12345";
}
this->mqttConfig= config.mqttConfig;
MqttSubscriber* subscriber = MqttSubscriber::getInstance(mqttConfig);
subscriber->start();
}
void MainWindow::divParameterUpdate(vides_data::responseConfig &cloudConfig ){
    // Diff the freshly fetched cloud config against the cached local copy and
    // rebuild only the parts (algorithm pools / timers) whose updateAt changed.
    bool faceChanged = false;
    bool plateChanged = false;
    bool uniformChanged = false;
    bool timerChanged = false;
    AlgorithmTaskManage &algorithmTaskManage = AlgorithmTaskManage::getInstance();
    if (cloudConfig.faceConfig.updateAt != config.faceConfig.updateAt) {
        config.faceConfig = cloudConfig.faceConfig;
        faceChanged = true;
    }
    if (cloudConfig.licensePlateConfig.updateAt != config.licensePlateConfig.updateAt) {
        config.licensePlateConfig = cloudConfig.licensePlateConfig;
        plateChanged = true;
    }
    if (cloudConfig.uniformConfig.updateAt != config.uniformConfig.updateAt) {
        config.uniformConfig = cloudConfig.uniformConfig;
        uniformChanged = true;
    }
    if (config.timerSettings.updateAt != cloudConfig.timerSettings.updateAt) {
        timerChanged = true;
    }
    if (!(faceChanged || plateChanged || uniformChanged || timerChanged)) {
        return; // nothing new from the cloud — skip the resource rebuild
    }
    // Bitmask telling the task manager WHICH pools changed
    // (bit2 = uniform, bit1 = face, bit0 = plate — see intToUint8t).
    __uint8_t alg = this->intToUint8t(faceChanged, plateChanged, uniformChanged);
    // NOTE(review): this also runs when only the timers changed (alg == 0) —
    // confirm releaseResources() is a no-op in that case.
    algorithmTaskManage.releaseResources(cloudConfig.uniformConfig.humanDetectionLen,
                                         cloudConfig.licensePlateConfig.licensePlateLen,
                                         cloudConfig.faceConfig.faceLen,
                                         modelPaths,
                                         cloudConfig.uniformConfig.carShapeConfidence,
                                         cloudConfig.uniformConfig.uniformColor,
                                         cloudConfig.licensePlateConfig.carConfidence,
                                         localImageMap,
                                         cloudConfig.faceConfig.faceNumbers,
                                         cloudConfig.faceConfig.confidence,
                                         alg);
    if (timerChanged) {
        // Restart each timer only when its interval actually differs.
        if (config.timerSettings.deleteLogFileTimer != cloudConfig.timerSettings.deleteLogFileTimer) {
            deleteLogFileTimer->stop();
            deleteLogFileTimer->setInterval(cloudConfig.timerSettings.deleteLogFileTimer);
            deleteLogFileTimer->start();
        }
        if (config.timerSettings.devicePermissionSynTimer != cloudConfig.timerSettings.devicePermissionSynTimer) {
            dePermissionSynTimer->stop();
            dePermissionSynTimer->setInterval(cloudConfig.timerSettings.devicePermissionSynTimer);
            dePermissionSynTimer->start();
        }
        config.timerSettings = cloudConfig.timerSettings;
    }
}
......@@ -292,13 +361,13 @@ void MainWindow::updateLocalFace(const QString &httpurl) {
}
if (isChanged) {
int humalen=instance.getHumanDetectionLen();
int licensePlateLen=instance.getLicensePlateLen();
int facelen=instance.getFaceLen();
float confidence=qSetting->value("devices/confidence").toFloat();
int faceNumbers=qSetting->value("devices/faceNumbers").toInt();
AlgorithmTaskManage &algorithmTaskManage= AlgorithmTaskManage::getInstance(humalen,licensePlateLen,facelen);
float confidence=config.faceConfig.confidence;
qSetting->value("devices/confidence").toFloat();
int faceNumbers=config.faceConfig.faceNumbers;
AlgorithmTaskManage &algorithmTaskManage= AlgorithmTaskManage::getInstance();
if (cloudImageMap.empty()) {
// 如果云端映射现在为空,移除所有特征
//faceRecognition.featureRemove();
......@@ -310,7 +379,7 @@ void MainWindow::updateLocalFace(const QString &httpurl) {
qInfo()<<"startMap != endMap-->";
// faceRecognition.initSourceImageMap(localImageMap,faceNumbers, confidence);
algorithmTaskManage.modifyImageFeature(localImageMap,faceNumbers,confidence,false);
}
}
for (vides_data::responseFaceReconition* data : datas)
......@@ -336,7 +405,7 @@ void MainWindow::findLocalSerialNumber(QString &serialNumber){
number=cpu_id_arm;
}
}
serialNumber =number.trimmed();
localSn=serialNumber;
}
......@@ -348,7 +417,7 @@ void MainWindow::clearHandle(QString sDevId, int nDevPort){
auto it = this->faceDetectionParkingPushs.find(key);
if (it != this->faceDetectionParkingPushs.end()) {
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
qInfo()<<"clearHandle:离线的设备是:"<<key;
CameraHandle* offlineCameraHandle = it->second; // 注意使用->second获取值
int hDevice=offlineCameraHandle->getHdevice();
......@@ -366,26 +435,22 @@ void MainWindow::startCamera(const QString &httpurl){
Common & instace= Common::getInstance();
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
QString nonConstHttpUrl = std::remove_const<QString>::type(httpurl);
vides_data::responseDeviceData devices;
// QString serialNumber = QSysInfo::machineUniqueId();
QString serialNumber;
findLocalSerialNumber(serialNumber);
qInfo()<<"serialNumber==:"<<serialNumber;
vides_data::requestDeviceStatus reStatus;
reStatus.sSn=serialNumber;
reStatus.status=1;
reStatus.type=1;
reStatus.ip_addr=instace.GetLocalIp();
reStatus.firmware_version=APP_VERSION;
HttpService httpService(httpurl);
vides_data::response *re= httpService.httpFindCameras(serialNumber,devices);
if(re->code==0 || re->code==20004){
QString username = qSetting->value("devices/username").toString();
QString password = qSetting->value("devices/password").toString();
std::map<QString,vides_data::localDeviceStatus*> localDevices;
mediaFaceImage->SdkSearchDevicesSyn(localDevices);
if(localDevices.size()<=0){
......@@ -398,7 +463,17 @@ void MainWindow::startCamera(const QString &httpurl){
instace.deleteObj(res);
return ;
}
int alg=devices.algorithm;
vides_data::responseConfig cloudConfig;
httpService.setHttpUrl(httpurl);
vides_data::response *res_config= httpService.httpDeviceConfig(serialNumber,cloudConfig);
if(res_config->code!=0){
qInfo()<<"请求远程商户配置失败";
instace.deleteObj(res_config);
return ;
}
instace.deleteObj(res_config);
divParameterUpdate(cloudConfig);
for (const auto& device : devices.list) {
if(localDevices.count(device.sSn)>0 ){
......@@ -406,25 +481,35 @@ void MainWindow::startCamera(const QString &httpurl){
QString ipAddress= QString("%1.%2.%3.%4").arg(localDevice->HostIP.c[0]).arg(localDevice->HostIP.c[1]).arg(localDevice->HostIP.c[2]).arg(localDevice->HostIP.c[3]);
//this->gatewayRandomIp(ipAddress);
QString key = ipAddress + ":" + QString::number(localDevice->TCPPort);
httpService.setHttpUrl(httpurl);
vides_data::responseConfig devConfig;
vides_data::response *devRes=httpService.httpDeviceConfig(device.sSn,devConfig);
if(devRes->code!=0){
qInfo()<<"请求相机配置失败";
instace.deleteObj(devRes);
continue;
}
instace.deleteObj(devRes);
if(faceDetectionParkingPushs.count(key)<=0){
httpService.setHttpUrl(httpurl);
vides_data::cameraParameters parameter;
parameter.sDevId=ipAddress;
parameter.nDevPort=localDevice->TCPPort;
parameter.sUserName=username;
parameter.sPassword=password;
parameter.sUserName=devConfig.camera.username;
parameter.sPassword=devConfig.camera.password;
parameter.channel=localDevice->ChannelNum;
parameter.httpUrl=nonConstHttpUrl;
parameter.sSn=device.sSn;
//parameter.rtspUrl="rtsp://192.168.10.131:554/user=admin&password=&channel=1&stream=1.sdp?";
//parameter.rtspUrl=std::move(QString("rtsp://admin:@%1/stream1").arg(ipAddress));
this->initCameras(parameter,device.areas,alg,reStatus.camera_info_list);
this->initCameras(parameter,devConfig,device.areas,reStatus.camera_info_list);
}
else {
CameraHandle *indexHandle=findHandle(device.sSn);
if(indexHandle!=nullptr &&device.is_reboot){
indexHandle->deviceReboot();
indexHandle->deviceReboot(false);
}else {
auto it = this->faceDetectionParkingPushs.find(key);
if (it != this->faceDetectionParkingPushs.end()) {
......@@ -433,22 +518,25 @@ void MainWindow::startCamera(const QString &httpurl){
camera_info.sSn=offlineCameraHandle->getSSn();
offlineCameraHandle->findIp(camera_info.ip_addr);
offlineCameraHandle->findFirmwareVersion(camera_info.firmware_version);
reStatus.camera_info_list.push_front(camera_info);
__uint8_t new_algorithm= intToUint8t(alg);
__uint8_t new_algorithm= intToUint8t(devConfig.faceConfig.isOn,devConfig.licensePlateConfig.isOn,devConfig.uniformConfig.isOn);
offlineCameraHandle->cameraParameterUpdate(devConfig);
offlineCameraHandle->initAlgorithmPermissions(new_algorithm);
if(!offlineCameraHandle->compareLists(device.areas)){
offlineCameraHandle->updateParkMapAndParkingSpaceInfos(device.areas);
}
}
}
}
}
}
this->deleteCloudNotCamer(localDevices, devices.list);
for (auto& pair : localDevices) {
if (pair.second != nullptr) { // 如果对象未被删除(即不为nullptr)
instace.deleteObj(pair.second);
......@@ -457,19 +545,19 @@ void MainWindow::startCamera(const QString &httpurl){
// 清空 localDevices 容器
localDevices.clear();
}
httpService.setHttpUrl(httpurl);
vides_data::response *res=httpService.httpPostDeviceStatus(reStatus);
if(res->code!=0){
qInfo()<<"盒子状态上报失败 code:"<<res->code<<"msg:"<<res->msg;
}
instace.deleteObj(res);
updateLocalFace(httpurl);
instace.deleteObj(re);
}
bool MainWindow::isDeviceInList(const QString& deviceId, const std::list<vides_data::responseDeviceStatus>& devices) {
......@@ -491,26 +579,74 @@ void MainWindow::deleteCloudNotCamer(const std::map<QString, vides_data::localDe
}
}
void MainWindow::initDevConfigSyn(CameraHandle *cameraHandle){
void MainWindow::initDevConfigSyn(CameraHandle *cameraHandle,vides_data::responseConfig &devConfig){
Common & instace= Common::getInstance();
QString time= instace.getTimeString();
cameraHandle->sdkDevSystemTimeZoneSyn(time);
QString recor;
iniRecordingToString(recor);
initRecordingToString(recor);
QByteArray bRecor =recor.toUtf8();
const char* cRecor=bRecor.data();
cameraHandle->sdkRecordCfg(cRecor);
QString enCode;
iniEncodeToString(enCode);
QByteArray bCode =enCode.toUtf8();
const char* cCode=bCode.data();
cameraHandle->sdkEncodeCfg(cCode);
QString enCode_one;
initDeviceEncodeToString(devConfig,enCode_one);
QByteArray benCode_one =enCode_one.toUtf8();
const char* b_one=benCode_one.data();
cameraHandle->sdkEncodeCfg(b_one);
}
void MainWindow::iniEncodeToString(QString &enCodeJson) {
void MainWindow::initDeviceEncodeToString(vides_data::responseConfig &source, QString &targetCodeJson){
    // Serialize the ExtraFormat/MainFormat encode sections of the device
    // config into the JSON array layout expected by the camera SDK
    // ([{ "ExtraFormat": {...}, "MainFormat": {...} }]).
    QJsonObject root;

    // Secondary (extra) stream settings.
    QJsonObject extraVideo;
    extraVideo.insert("BitRate", source.extraFormat.BitRate);
    extraVideo.insert("BitRateControl", source.extraFormat.BitRateControl);
    extraVideo.insert("Compression", source.extraFormat.Compression);
    extraVideo.insert("FPS", source.extraFormat.FPS);
    extraVideo.insert("GOP", source.extraFormat.GOP);
    extraVideo.insert("Quality", source.extraFormat.Quality);
    extraVideo.insert("Resolution", source.extraFormat.Resolution);
    extraVideo.insert("VirtualGOP", source.extraFormat.VirtualGOP);
    QJsonObject extraFormat;
    extraFormat.insert("VideoEnable", source.extraFormat.VideoEnable);
    extraFormat.insert("AudioEnable", source.extraFormat.AudioEnable);
    extraFormat.insert("Video", extraVideo);
    root.insert("ExtraFormat", extraFormat);

    // Main stream settings.
    QJsonObject mainVideo;
    mainVideo.insert("BitRate", source.mainFormat.BitRate);
    mainVideo.insert("BitRateControl", source.mainFormat.BitRateControl);
    mainVideo.insert("Compression", source.mainFormat.Compression);
    mainVideo.insert("FPS", source.mainFormat.FPS);
    mainVideo.insert("GOP", source.mainFormat.GOP);
    mainVideo.insert("Quality", source.mainFormat.Quality);
    mainVideo.insert("Resolution", source.mainFormat.Resolution);
    mainVideo.insert("VirtualGOP", source.mainFormat.VirtualGOP);
    QJsonObject mainFormat;
    mainFormat.insert("VideoEnable", source.mainFormat.VideoEnable);
    mainFormat.insert("AudioEnable", source.mainFormat.AudioEnable);
    mainFormat.insert("Video", mainVideo);
    root.insert("MainFormat", mainFormat);

    // The SDK expects a one-element JSON array wrapping the config object.
    QJsonArray wrapper;
    wrapper.append(root);
    targetCodeJson = QString::fromUtf8(QJsonDocument(wrapper).toJson());
}
void MainWindow::initEncodeToString(QString &enCodeJson) {
// 创建 JSON 对象
QJsonObject rootObject;
// 添加 ExtraFormat 到 JSON 对象中
QJsonObject extraFormatObject;
QJsonObject videoObjectExtra = {
......@@ -527,7 +663,7 @@ void MainWindow::iniEncodeToString(QString &enCodeJson) {
extraFormatObject["AudioEnable"] = qSetting->value("ExtraFormat/AudioEnable").toBool();
extraFormatObject["Video"] = videoObjectExtra;
rootObject["ExtraFormat"] = extraFormatObject;
// 添加 MainFormat 到 JSON 对象中
QJsonObject mainFormatObject;
QJsonObject videoObjectMain = {
......@@ -546,7 +682,7 @@ void MainWindow::iniEncodeToString(QString &enCodeJson) {
rootObject["MainFormat"] = mainFormatObject;
QJsonArray jsonArray;
jsonArray.append(rootObject);
// 将 JSON 对象转换为 JSON 文档
QJsonDocument jsonDocument(jsonArray);
enCodeJson = QString::fromUtf8(jsonDocument.toJson());
......@@ -556,7 +692,7 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
qDebug()<<"iniWorkSpVMn=="<<sn;
QString jsonfile=QCoreApplication::applicationDirPath()+"/camera_config.json";
bool isEqual=true;
// 读取 JSON 配置文件
QFile file(jsonfile);
if (!file.open(QIODevice::ReadOnly)) {
......@@ -564,13 +700,13 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
return isEqual;
}
QJsonObject toJsonObject;
// 解析 JSON 数据
QByteArray jsonData = file.readAll();
file.close();
QJsonDocument jsonDoc = QJsonDocument::fromJson(jsonData);
QJsonObject rootObj = jsonDoc.object();
// 获取 cameraconfigs 对象
QJsonArray cameraConfigs = rootObj.value("cameraconfigs").toArray();
bool found = false;
......@@ -615,12 +751,12 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
toJsonObject["iHsIntervalTime"]=gb28181->heartbeat_interval;
toJsonObject["szConnPass"]=gb28181->password;
toJsonObject["szDeviceNO"]=gb28181->device_id;
QVariantList variantList;
for (int i = 0; i < 64; ++i) {
variantList.append(QVariant(0));
}
QJsonArray levelArray = QJsonArray::fromVariantList(variantList);
toJsonObject["AlarmLevel"]=levelArray;
QStringList alarmidStrings;
......@@ -633,9 +769,9 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
for (int i = 0; i < 64; ++i) {
variantListLevel.append(QVariant(0));
}
QJsonArray camreaLevelArray = QJsonArray::fromVariantList(variantListLevel);
toJsonObject["CamreaLevel"]=camreaLevelArray;
QStringList camreaidStrings;
for (int i = 1; i <= 64; ++i) {
......@@ -673,7 +809,7 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
newValue["iHsIntervalTime"]=gb28181->heartbeat_interval;
newValue["szConnPass"]=gb28181->password;
newValue["szDeviceNO"]=gb28181->device_id;
toJsonObject["szCsIP"]=gb28181->sip_ip ;
toJsonObject["szServerNo"]=gb28181->serial;
toJsonObject["sCsPort"]=gb28181->sip_port;
......@@ -682,14 +818,14 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
toJsonObject["iHsIntervalTime"]=gb28181->heartbeat_interval;
toJsonObject["szConnPass"]=gb28181->password;
toJsonObject["szDeviceNO"]=gb28181->device_id;
QVariantList variantList;
for (int i = 0; i < 64; ++i) {
variantList.append(0);
}
QJsonArray levelArray = QJsonArray::fromVariantList(variantList);
toJsonObject["AlarmLevel"]=levelArray;
QStringList alarmidStrings;
for (int i = 1; i <= 64; ++i) {
......@@ -738,14 +874,14 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
QJsonDocument saveDoc(rootObj);
file.write(saveDoc.toJson());
file.close();
}
return isEqual;
}
void MainWindow::iniRecordingToString(QString &recorJson){
void MainWindow::initRecordingToString(QString &recorJson){
QJsonObject jsonObject;
// 读取 Mask 数据
QJsonArray maskArray;
// 遍历所有掩码
......@@ -754,27 +890,27 @@ void MainWindow::iniRecordingToString(QString &recorJson){
// 读取掩码值。存储为QStringList,就像在ini文件中定义的一样
QStringList maskValues = qSetting->value(maskKey).toStringList();
QJsonArray maskSubArray;
foreach (const QString &value, maskValues) {
maskSubArray.append(value.trimmed());
}
maskArray.append(maskSubArray);
}
jsonObject["Mask"] = maskArray;
// 读取 Packet 数据
jsonObject["PacketLength"] =qSetting->value("Packet/PacketLength").toInt();
jsonObject["PreRecord"] = qSetting->value("Packet/PreRecord").toInt();
jsonObject["RecordMode"] = qSetting->value("Packet/RecordMode").toString();
jsonObject["Redundancy"] = qSetting->value("Packet/Redundancy").toBool();
// 读取 TimeSection 数据
QJsonArray timeArray;
for (int ts = 1; ts <= 7; ts++) {
QString tsKey = QString("TimeSection/TimeSection_%1").arg(ts);
// 读取掩码值。存储为QStringList,就像在ini文件中定义的一样
QStringList tsValues = qSetting->value(tsKey).toStringList();
QJsonArray timeSubArray;
......@@ -790,38 +926,47 @@ void MainWindow::iniRecordingToString(QString &recorJson){
recorJson = QString::fromUtf8(jsonDocument.toJson());
}
/// Pack the three algorithm switches into one permission bitmask:
/// bit 2 = uniform (work-clothes) detection, bit 1 = face recognition,
/// bit 0 = license-plate recognition.
/// The span previously contained a leftover fragment of the old
/// intToUint8t(int) overload and a stray `return 0x07;` that short-circuited
/// the function, making the bit-packing below dead code — both removed.
__uint8_t MainWindow::intToUint8t(bool faceAlgorithm, bool licensePlateAlgorithm, bool uniformAlgorithm) {
    __uint8_t result = 0;
    // 工服识别对应最高位(第2位) — uniform detection occupies bit 2
    result |= (uniformAlgorithm ? 1 : 0) << 2;
    // 人脸识别对应次高位(第1位) — face recognition occupies bit 1
    result |= (faceAlgorithm ? 1 : 0) << 1;
    // 车牌识别对应最低位(第0位) — plate recognition occupies bit 0
    result |= (licensePlateAlgorithm ? 1 : 0);
    return result;
}
void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::list<vides_data::responseArea>&areas,int algorithm,std::list<vides_data::requestCameraInfo>&camera_info_list){
void MainWindow::initCameras(vides_data::cameraParameters &parameter,vides_data::responseConfig &devConfig,const std::list<vides_data::responseArea>&areas,std::list<vides_data::requestCameraInfo>&camera_info_list){
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
int image_save=qSetting->value("devices/image_save").toInt();
CameraHandle * cameraHandle =new CameraHandle(parameter.sDevId,parameter.httpUrl,parameter.sSn,parameter.channel,image_save);
Common & instace= Common::getInstance();
int image_save=devConfig.camera.imageSave;
float heightReference=devConfig.camera.heightReference;
CameraHandle * cameraHandle =new CameraHandle(parameter.sDevId,parameter.httpUrl,parameter.sSn,parameter.channel,image_save,heightReference,devConfig);
int sdk_handle=cameraHandle->sdkDevLoginSyn(parameter.sDevId,parameter.nDevPort,parameter.sUserName,parameter.sPassword,3000);
qDebug()<<"句柄为2:"<<sdk_handle;
if(sdk_handle<=0){
qInfo() << "登录失败";
return ;
}
initDevConfigSyn(cameraHandle);
mediaFaceImage->setMap(sdk_handle,cameraHandle);
cameraHandle->sdkDevSetAlarmListener(sdk_handle,0);
int synTime=qSetting->value("timer/dev_snap_syn_timer").toInt();
uint64 face_frequency=qSetting->value("devices/face_frequency").toULongLong();
initDevConfigSyn(cameraHandle,devConfig);
int synTime=devConfig.camera.devSnapSynTimer;
uint64 face_frequency=devConfig.faceConfig.faceFrequency;
float carConfidenceMax=devConfig.licensePlateConfig.carConfidenceMax;
float carConfidenceMin=devConfig.licensePlateConfig.carConfidenceMin;
cameraHandle->initSdkRealTimeDevSnapSyn(sdk_handle,synTime,face_frequency);
cameraHandle->setCarConfidenceMaxAndMin(carConfidenceMax,carConfidenceMin);
// QString pwd="admin2024";
// QString sid="MERCURY_8C4F";
// cameraHandle->sdkWifi(pwd,sid);
......@@ -829,14 +974,13 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
camera_info.sSn=parameter.sSn;
camera_info.ip_addr=parameter.sDevId;
cameraHandle->findFirmwareVersion(camera_info.firmware_version);
camera_info_list.push_front(camera_info);
__uint8_t new_algorithm= intToUint8t(algorithm);
__uint8_t new_algorithm= intToUint8t(devConfig.faceConfig.isOn,devConfig.licensePlateConfig.isOn,devConfig.uniformConfig.isOn);
cameraHandle->initAlgorithmPermissions(new_algorithm);
cameraHandle->initParkingSpaceInfo(areas);
Common & instace= Common::getInstance();
QString key =parameter.sDevId + ":" + QString::number(parameter.nDevPort);
faceDetectionParkingPushs[key]= cameraHandle;
HttpService httpService(parameter.httpUrl);
......@@ -847,13 +991,14 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
return;
}
vides_data::responseGb28181 *gb281 = reinterpret_cast<vides_data::responseGb28181*>(res->data);
QString stGb281;
bool re= iniWorkSpVMn(gb281,stGb281,parameter.sSn);
if(!re){
QByteArray bGb =stGb281.toUtf8();
const char* cGb=bGb.data();
cameraHandle->sdkDevSpvMn(cGb);
}
// QString stGb281;
// bool re= iniWorkSpVMn(gb281,stGb281,parameter.sSn);
// if(!re){
// QByteArray bGb =stGb281.toUtf8();
// const char* cGb=bGb.data();
// cameraHandle->sdkDevSpvMn(cGb);
// }
cameraHandle->updateSdkDevSpvMn(gb281);
instace.deleteObj(gb281);
instace.deleteObj(res);
}
......@@ -894,11 +1039,6 @@ void MainWindow::initCommon(){
createDirectory(0x01,"frame_images", "目录创建成功", "目录创建失败");
createDirectory(0x00,"frame_video", "创建视频目录成功", "视频目录创建失败");
createDirectory(0x02,"images", "图片目录创建成功", "图片目录创建失败");
float carConfidenceMax=qSetting->value("devices/carConfidenceMax").toFloat();
float carConfidenceMin=qSetting->value("devices/carConfidenceMin").toFloat();
Common& instance = Common::getInstance();
instance.setCarConfidenceMax(carConfidenceMax);
instance.setCarConfidenceMin(carConfidenceMin);
}
MainWindow::~MainWindow()
......@@ -907,36 +1047,16 @@ MainWindow::~MainWindow()
Common & instace= Common::getInstance();
instace.deleteObj(qSetting);
instace.deleteObj(deleteLogFileTimer);
instace.deleteObj(deleteFrameFileTimer);
instace.deleteObj(dePermissionSynTimer);
for(auto iter = faceDetectionParkingPushs.begin(); iter != faceDetectionParkingPushs.end(); ++iter) {
instace.deleteObj( iter->second);
}
// 清空 handleMap
faceDetectionParkingPushs.clear();
LogHandler::Get().uninstallMessageHandler();
}
// Periodic cleanup slot: scans the video output directory and deletes media
// files whose metadata-change time is older than 24 hours.
void MainWindow::deleteMkvFileTimer(){
    Common& instance = Common::getInstance();
    QDir dir(instance.getVideoOut());
    QStringList filters;
    filters << "*.mp4" << "*.avi" << "*.jpg" << "*.mkv"; // extend with further media formats as needed
    dir.setNameFilters(filters);
    QFileInfoList fileList = dir.entryInfoList(QDir::Files | QDir::NoDotAndDotDot);
    foreach (QFileInfo fileInfo, fileList) {
        // metadataChangeTime(), not birth time — close enough for cleanup purposes.
        QDateTime createTime = fileInfo.metadataChangeTime();
        QDateTime now = QDateTime::currentDateTime();
        // Older than 24 hours. NOTE(review): the original comment claimed 72h
        // but the threshold is 24 * 3600 s — confirm which one is intended.
        if (createTime.secsTo(now) > 24 * 3600) {
            if (!QFile::remove(fileInfo.absoluteFilePath())) {
                qInfo() << "Failed to delete file:" << fileInfo.fileName();
            }
        }
    }
}
void MainWindow::deleteLogFile(){
......@@ -945,20 +1065,20 @@ void MainWindow::deleteLogFile(){
// 前7天
QDateTime dateTime1 = now.addDays(-7);
QDateTime dateTime2;
QString logPath = logDir.absoluteFilePath(""); // 日志的路径
QDir dir(logPath);
QStringList filename ;
filename << "*.log";//可叠加,可使用通配符筛选
QFileInfoList fileList = dir.entryInfoList(filename);
foreach (QFileInfo f, fileList) {
// "."和".."跳过
if (f.baseName() == "" || f.baseName()=="today" )
continue;
dateTime2 = QDateTime::fromString(f.baseName(), "yyyy-MM-dd");
if (dateTime2 < dateTime1) { // 只要日志时间小于前7天的时间就删除
dir.remove(f.absoluteFilePath());
......@@ -967,11 +1087,11 @@ void MainWindow::deleteLogFile(){
}
void MainWindow::initFaceFaceRecognition() {
qSetting->beginGroup("cloudImageMap");
QStringList keys = qSetting->childKeys();
foreach(QString key, keys) {
QString value = qSetting->value(key).toString();
cloudImageMap[key]=value;
......@@ -979,7 +1099,7 @@ void MainWindow::initFaceFaceRecognition() {
qSetting->endGroup();
qSetting->beginGroup("localImageMap");
QStringList lokeys = qSetting->childKeys();
foreach(QString lk, lokeys) {
// 获取键对应的值
QString value = qSetting->value(lk).toString();
......
......@@ -7,6 +7,7 @@
#include "VidesData.h"
#include "MediaFaceImage.h"
#include "AlgorithmTaskManage.h"
#include "MqttSubscriber.h"
#include <algorithm>
#include <QString>
#include <QTextCodec>
......@@ -32,7 +33,7 @@ public:
explicit MainWindow();
void initCommon();
void setVideoPath(int flag, const QString& path);
void createDirectory(int flag,const QString& dirName, const QString& successMsg, const QString& failureMsg);
......@@ -40,10 +41,10 @@ public:
void initFaceFaceRecognition();
void initCameras(vides_data::cameraParameters &parameter,
const std::list<vides_data::responseArea>&areas,int algorithm,std::list<vides_data::requestCameraInfo>&camera_info_list);
__uint8_t intToUint8t(int algorithm);
void initCameras(vides_data::cameraParameters &parameter, vides_data::responseConfig &devConfig, const std::list<vides_data::responseArea>&areas,std::list<vides_data::requestCameraInfo>&camera_info_list);
__uint8_t intToUint8t(bool faceAlgorithm,bool licensePlateAlgorithm,bool uniformAlgorithm);
//盒子参数更新
void divParameterUpdate(vides_data::responseConfig &cloudConfig );
static MainWindow * sp_this;
......@@ -62,11 +63,13 @@ public:
void findLocalSerialNumber(QString &serialNumber);
void initDevConfigSyn(CameraHandle *cameraHandle);
void initDevConfigSyn(CameraHandle *cameraHandle,vides_data::responseConfig &devConfig);
void iniRecordingToString(QString &recorJson);
void initRecordingToString(QString &recorJson);
void iniEncodeToString(QString &enCodeJson);
void initDeviceEncodeToString(vides_data::responseConfig &source, QString &targetCodeJson);
void initEncodeToString(QString &enCodeJson);
void clearOfflineCameraHandle(QString sDevId, int nDevPort);
......@@ -89,8 +92,6 @@ private slots:
void clearHandle(QString sDevId, int nDevPort);
void deleteMkvFileTimer();
void handleMatNewConnection();
private:
//Ui::MainWindow *ui;
......@@ -98,9 +99,7 @@ private:
QSettings *qSetting;
QTimer *deleteLogFileTimer;
QTimer *deleteFrameFileTimer;
QTimer*dePermissionSynTimer;
QTcpServer server;
......@@ -115,6 +114,11 @@ private:
QString modelPaths;
std::map<QString,CameraHandle*>faceDetectionParkingPushs;
vides_data::responseConfig config;
vides_data::MqttConfig mqttConfig;
};
#endif // MAINWINDOW_H
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment