Commit 54c45fe9 by “liusq”

Put all algorithms into the scheduling pool

parent c2bb647b
#include "AlgorithmTaskManage.h" #include "AlgorithmTaskManage.h"
AlgorithmTaskManage::AlgorithmTaskManage(int humanDetectionLen):semaphore(humanDetectionLen), AlgorithmTaskManage::AlgorithmTaskManage(int humanDetectionLen,int licensePlateLen,int faceLen ):semaphore(humanDetectionLen),
plateSemaphore(humanDetectionLen){ plateSemaphore(licensePlateLen),faceSemaphore(faceLen){
this->humanDetectionLen=humanDetectionLen; this->humanDetectionLen=humanDetectionLen;
this->licensePlateLen=licensePlateLen;
this->faceLen=faceLen;
} }
void AlgorithmTaskManage::initHumanDetectionManage(const QString &modelPaths, void AlgorithmTaskManage::initHumanDetectionManage(const QString &modelPaths,
...@@ -15,12 +18,35 @@ void AlgorithmTaskManage::initHumanDetectionManage(const QString &modelPaths, ...@@ -15,12 +18,35 @@ void AlgorithmTaskManage::initHumanDetectionManage(const QString &modelPaths,
} }
 void AlgorithmTaskManage::initLicensePlateManage(const QString &modelPaths,
                                                  float carConfidence){
-    for (int i = 0; i < humanDetectionLen; ++i) {
+    for (int i = 0; i < licensePlateLen; ++i) {
         LicensePlateRecognition* licensePlateRecognition=new LicensePlateRecognition(modelPaths,carConfidence);
         licensePlateRecognitions.emplace_back(licensePlateRecognition);
     }
 }
+void AlgorithmTaskManage::initFaceReconitionHandle(std::map<QString,QString>&maps,int numberFaces,float confidence){
+    for (int i = 0; i < faceLen; ++i) {
+        FaceReconitionHandle *faceReconitionHandle= new FaceReconitionHandle();
+        faceReconitionHandle->initSourceImageMap(maps,numberFaces,confidence);
+        faceReconitionHandles.emplace_back(faceReconitionHandle);
+    }
+}
+void AlgorithmTaskManage::modifyImageFeature(std::map<QString,QString>&maps,int numberFaces,float confidence,bool isNull){
+    static int i=0;
+    printf("modifyImageFeature call count: %d\n", ++i);
+    std::lock_guard<std::mutex> lock(mtxFace);
+    for (FaceReconitionHandle* face : faceReconitionHandles) {
+        face->setImageChanged(true);
+        if(isNull){
+            face->featureRemove();
+        }else {
+            face->initSourceImageMap(maps,numberFaces,confidence);
+        }
+    }
+}
 AlgorithmTaskManage::~AlgorithmTaskManage(){
     Common & instace= Common::getInstance();
@@ -30,6 +56,9 @@ AlgorithmTaskManage::~AlgorithmTaskManage(){
     for (auto plate:licensePlateRecognitions) {
         instace.deleteObj(plate);
     }
+    for (auto face:faceReconitionHandles) {
+        instace.deleteObj(face);
+    }
 }
 void* AlgorithmTaskManage::schedulingAlgorithm(int scheType) {
@@ -111,11 +140,67 @@ void* AlgorithmTaskManage::schedulingAlgorithm(int scheType) {
         std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
         return schedulableObjects[dis(gen)];
-    } else {
+    }else if (scheType==0x03) {
+        std::lock_guard<std::mutex> lock(mtxFace);
+        // Use the current time as the reference point
+        qint64 currentTime = QDateTime::currentSecsSinceEpoch();
+        qint64 maxWaitTime = 0;
+        // Number of objects sharing the maximum wait time
+        int maxWaitTimeCount = 0;
+        std::vector<FaceReconitionHandle*> schedulableObjects;
+        // Walk faceReconitionHandles and collect every idle FaceReconitionHandle whose wait time equals the current maximum
+        for (FaceReconitionHandle* face : faceReconitionHandles) {
+            if (face->getIsRunning()) continue;
+            // Wait time since this object last executed
+            qint64 waitTime = std::abs(currentTime - face->getThreadTime());
+            if (waitTime > maxWaitTime) {
+                schedulableObjects.clear();
+                schedulableObjects.push_back(face);
+                maxWaitTime = waitTime;
+                maxWaitTimeCount = 1;
+            } else if (waitTime == maxWaitTime) {
+                schedulableObjects.push_back(face);
+                maxWaitTimeCount++;
+            }
+        }
+        // If exactly one object has the maximum wait time, return it directly
+        if (maxWaitTimeCount == 1) {
+            return schedulableObjects.at(0);
+        }
+        if (schedulableObjects.empty()) {
+            return nullptr; // Nothing is schedulable; return nullptr and let the caller handle it
+        }
+        // Randomly pick one of the schedulable objects
+        std::random_device rd;
+        std::mt19937 gen(rd());
+        std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
+        return schedulableObjects[dis(gen)];
+    }
+    else {
         qInfo() << "Invalid scheType parameter";
         return nullptr;
     }
 }
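Note: the scheType==0x03 branch added above is a longest-wait-first policy with a random tie-break among idle handles. As a reference only (not part of the commit), a minimal standalone C++ sketch of that policy, using the illustrative names Worker and pickLongestWaiting, looks like this:

#include <cstdint>
#include <random>
#include <vector>

struct Worker {
    bool running = false;   // analogous to getIsRunning()
    int64_t lastRun = 0;    // analogous to getThreadTime(), same clock/unit as `now`
};

// Pick the idle worker that has waited longest since its last run;
// if several share the maximum wait, choose one of them at random.
Worker* pickLongestWaiting(std::vector<Worker*>& workers, int64_t now) {
    std::vector<Worker*> candidates;
    int64_t maxWait = -1;
    for (Worker* w : workers) {
        if (w->running) continue;            // skip busy workers
        int64_t wait = now - w->lastRun;     // wait time since last execution
        if (wait > maxWait) {
            candidates.assign(1, w);         // new maximum: restart the candidate set
            maxWait = wait;
        } else if (wait == maxWait) {
            candidates.push_back(w);         // tie: keep as an additional candidate
        }
    }
    if (candidates.empty()) return nullptr;  // everything is busy
    static std::mt19937 gen{std::random_device{}()};
    std::uniform_int_distribution<std::size_t> dis(0, candidates.size() - 1);
    return candidates[dis(gen)];             // random tie-break (also works when size()==1)
}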
+void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces,QString sSn){
+    faceSemaphore.acquire();
+    ScopeSemaphoreExit guard([this]() {
+        faceSemaphore.release(); // release the semaphore on every exit path
+    });
+    auto* selectedFaceReconition = static_cast<FaceReconitionHandle*>(schedulingAlgorithm(0x03));
+    if (selectedFaceReconition!=nullptr && !selectedFaceReconition->getImageChanged()) {
+        selectedFaceReconition->setIsRunning(true);
+        // Invoke doesItExistEmployee on the selected object
+        qInfo() << "Face recognition algorithm acquired ===>sn" << sSn << selectedFaceReconition;
+        selectedFaceReconition->doesItExistEmployee(source, faces);
+    } else {
+        qDebug() << "No schedulable FaceReconitionHandle object available";
+        return;
+    }
+}
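Note: ScopeSemaphoreExit itself is not shown in this diff; from its use above it behaves as a small scope-exit (RAII) guard that runs a callback when it leaves scope, so faceSemaphore.release() happens on every return path. A minimal sketch of such a guard, under that assumed interface and with the illustrative name ScopeGuard, could be:

#include <functional>
#include <utility>

// Runs the supplied callback when the guard leaves scope, so a resource such as
// a semaphore is released on every return path (including early returns and exceptions).
class ScopeGuard {
public:
    explicit ScopeGuard(std::function<void()> onExit) : onExit_(std::move(onExit)) {}
    ~ScopeGuard() { if (onExit_) onExit_(); }
    ScopeGuard(const ScopeGuard&) = delete;             // non-copyable
    ScopeGuard& operator=(const ScopeGuard&) = delete;
private:
    std::function<void()> onExit_;
};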
 void AlgorithmTaskManage::executeFindlicensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
......
@@ -5,6 +5,7 @@
 #include "VidesData.h"
 #include "ScopeSemaphoreExit.h"
 #include "LicensePlateRecognition.h"
+#include "FaceReconitionHandle.h"
 #include <QThread>
 #include <random>
 #include <QSemaphore>
@@ -13,11 +14,11 @@
 #include <mutex>
 class AlgorithmTaskManage{
 public:
-    AlgorithmTaskManage(int humanDetectionLen);
+    AlgorithmTaskManage(int humanDetectionLen,int licensePlateLen,int faceLen);
     ~AlgorithmTaskManage();
-    static AlgorithmTaskManage& getInstance(int humanDetectionLen)
+    static AlgorithmTaskManage& getInstance(int humanDetectionLen,int licensePlateLen,int faceLen)
     {
-        static AlgorithmTaskManage instance(humanDetectionLen);
+        static AlgorithmTaskManage instance(humanDetectionLen,licensePlateLen,faceLen);
         return instance;
     }
     void initHumanDetectionManage(const QString &modelPaths,
@@ -25,33 +26,49 @@ public:
     void initLicensePlateManage(const QString &modelPaths,
                                 float carConfidence);
-    int executeFindHuManCar(const cv::Mat &source,int res,std::vector<vides_data::ParkingArea> &currentPlate,
-                            std::map<int,int>&resMap, QString sSn);
+    void modifyImageFeature(std::map<QString,QString>&maps,int numberFaces,float confidence,bool isNull);
+    void initFaceReconitionHandle(std::map<QString,QString>&maps,int numberFaces,float confidence);
     void *schedulingAlgorithm(int scheType);
+    int executeFindHuManCar(const cv::Mat &source,int res,std::vector<vides_data::ParkingArea> &currentPlate,
+                            std::map<int,int>&resMap, QString sSn);
     void executeFindlicensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
                                        qint64 currentTime,QString sSn);
+    void executeFindDoesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces,QString sSn);
 private:
     static AlgorithmTaskManage* instance;
     std::vector<HumanDetection*>humanDetections;
     std::vector<LicensePlateRecognition*>licensePlateRecognitions;
+    std::vector<FaceReconitionHandle*>faceReconitionHandles;
     int humanDetectionLen;
+    int licensePlateLen;
+    int faceLen;
     QSemaphore semaphore;
     QSemaphore plateSemaphore;
+    QSemaphore faceSemaphore;
     std::mutex mtxHuman;
     std::mutex mtxLicense;
+    std::mutex mtxFace;
 };
......
@@ -19,7 +19,6 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch
     dev_snap_syn_timer(new QTimer()),
     image_save(imageSave),
     faceCount(0),
-    faceReconitionHandle(new FaceReconitionHandle()),
     semaphore(1) {
     connect(this, SIGNAL(afterDownloadFile(int,int,QString)), this, SLOT(pushRecordToCloud(int,int,QString)),Qt::QueuedConnection);
@@ -28,13 +27,6 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch
     faceMapWorker.setY(0);
 }
-void CameraHandle::notificationUpdateImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence){
-    faceReconitionHandle->initSourceImageMap(maps,numberFaces,confidence);
-}
-void CameraHandle::featureRemove(){
-    faceReconitionHandle->featureRemove();
-}
 CameraHandle::~CameraHandle() {
     stopRequested_=true;
     Common & instace= Common::getInstance();
@@ -51,7 +43,6 @@ CameraHandle::~CameraHandle() {
     }
     parkMap.clear();
     XSDK_DevLogout(this->hDevice);
-    instace.deleteObj(faceReconitionHandle);
     qInfo() << "CameraHandle: success";
 }
@@ -447,7 +438,10 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
     if(newInfo.getLicensePlate().length()<=0){
         Common & instace= Common::getInstance();
         int humanLen=instace.getHumanDetectionLen();
-        AlgorithmTaskManage &algorithmTaskManage=AlgorithmTaskManage::getInstance(humanLen);
+        int licensePlateLen=instace.getLicensePlateLen();
+        int faceLen=instace.getFaceLen();
+        AlgorithmTaskManage &algorithmTaskManage=AlgorithmTaskManage::getInstance(humanLen,licensePlateLen,faceLen);
         std::vector<vides_data::ParkingArea> currentPlates;
         std::map<int,int>resMap;
@@ -547,8 +541,10 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
     int height = frame.rows; // image height
     int humanlen=instace.getHumanDetectionLen();
     qDebug()<<"frame width:"<<width<<"frame height:"<<height;
-    AlgorithmTaskManage &algorithmTaskManage=AlgorithmTaskManage::getInstance(humanlen);
+    int licensePlateLen=instace.getLicensePlateLen();
+    int faceLen=instace.getFaceLen();
+    AlgorithmTaskManage &algorithmTaskManage=AlgorithmTaskManage::getInstance(humanlen,licensePlateLen,faceLen);
     std::map<QString,vides_data::requestFaceReconition> mapFaces;
@@ -573,12 +569,10 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
             }
         }else{
             // human detection
-            if((algorithmPermissions & 0x01<<1) != 0){
-                uniforms=algorithmTaskManage.executeFindHuManCar(frame,0x00,currentPlates,resMap,sSn);
-                if(currentPlates.size()>0x00){
-                    faSize=resMap.at(0x00);
-                    uniforms=faSize;
-                }
+            uniforms=algorithmTaskManage.executeFindHuManCar(frame,0x00,currentPlates,resMap,sSn);
+            if(currentPlates.size()>0x00){
+                faSize=resMap.at(0x00);
+                uniforms=faSize;
             }
         }
         qint64 elapsedTime = timer.elapsed();
@@ -614,7 +608,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
     if(faSize>0 && (algorithmPermissions & 0x01<<1) != 0){
         qDebug() << "faceRecognition.doesItExistEmployee Current thread ID: " << QThread::currentThreadId()<<sSn;
         std::list<vides_data::faceRecognitionResult>faces;
-        faceReconitionHandle->doesItExistEmployee(frame,faces);
+        algorithmTaskManage.executeFindDoesItExistEmployee(frame,faces,sSn);
         qInfo()<<"Number of recognized faces ==>"<<faces.size();
         if (faces.size()>0) {
@@ -736,7 +730,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
     }
     vides_data::requestLicensePlate resultPlate;
     resultPlate.sn=sSn;
     algorithmTaskManage.executeFindlicensePlateNumber(areaMat, lpNumber,resultPlate,currentTime,sSn);
     std::list<vides_data::LicensePlate>ps =resultPlate.plates;
......
@@ -158,8 +158,6 @@ private :
     std::map<int, vides_data::responseRecognitionData> videoCurrentData;
     std::map<QString, QString> currentData;
-    FaceReconitionHandle *faceReconitionHandle;
     // Each area number maps to one area-info entry
     std::map<int,ParkingSpaceInfo*>parkMap;
......
@@ -80,8 +80,16 @@ void Common::setCarConfidenceMin(float carConfidenceMin){
 int Common::getHumanDetectionLen() const{
     return humanDetectionLen;
 }
-void Common::setHumanDetectionLen(int humanDetectionLen){
+int Common::getLicensePlateLen() const{
+    return licensePlateLen;
+}
+int Common::getFaceLen() const{
+    return faceLen;
+}
+void Common::setTaskManage(int humanDetectionLen,int licensePlateLen,int faceLen){
     this->humanDetectionLen=humanDetectionLen;
+    this->licensePlateLen=licensePlateLen;
+    this->faceLen=faceLen;
 }
 QString Common::GetLocalIp() {
     QString ipAddress;
......
@@ -51,7 +51,9 @@
     void setCarConfidenceMin(float carConfidenceMin);
     int getHumanDetectionLen() const;
-    void setHumanDetectionLen(int humanDetectionLen);
+    int getLicensePlateLen() const;
+    int getFaceLen() const;
+    void setTaskManage(int humanDetectionLen,int licensePlateLen,int faceLen);
     template <typename T>
     const T& clamp(const T& v, const T& lo, const T& hi)
@@ -73,6 +75,8 @@
     float carConfidenceMax;
     float carConfidenceMin;
     int humanDetectionLen;
+    int licensePlateLen;
+    int faceLen;
     Common();
     ~Common();
......
@@ -11,15 +11,37 @@ using namespace cimg_library;
 FaceReconitionHandle::FaceReconitionHandle() {
+    static int ii=0;
+    printf("FaceReconitionHandle constructed %d times\n", ++ii);
 }
 FaceReconitionHandle::~FaceReconitionHandle(){
     if (ctxHandle != nullptr) {
+        static int ii=0;
+        printf("FaceReconitionHandle destroyed %d times\n", ++ii);
         HF_ReleaseFaceContext(ctxHandle);
         ctxHandle = nullptr;
     }
 }
+qint64 FaceReconitionHandle::getThreadTime() const{
+    return thread_time.load(std::memory_order_acquire);
+}
+bool FaceReconitionHandle::getIsRunning() const{
+    return isRunning.load(std::memory_order_acquire);
+}
+void FaceReconitionHandle::setIsRunning(bool running){
+    // stores pair with the acquire loads above, so use release ordering
+    this->isRunning.store(running, std::memory_order_release);
+}
+bool FaceReconitionHandle::getImageChanged()const{
+    return isImageChanged.load(std::memory_order_acquire);
+}
+void FaceReconitionHandle::setImageChanged(bool imageChanged){
+    this->isImageChanged.store(imageChanged, std::memory_order_release);
+}
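Note on the memory orders above: std::atomic<T>::store() must not be called with std::memory_order_acquire (acquire applies to loads), so the setters use a release store that pairs with the acquire loads in the getters. A tiny self-contained example of that pairing, for reference only:

#include <atomic>

std::atomic<bool> ready{false};
int payload = 0;

void producer() {
    payload = 42;                                      // plain write...
    ready.store(true, std::memory_order_release);      // ...published by the release store
}

int consumer() {
    while (!ready.load(std::memory_order_acquire)) {}  // acquire load pairs with the release store
    return payload;                                    // guaranteed to observe 42
}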
 cv::Mat FaceReconitionHandle::loadImage(const QString &path) {
     // Try loading the image directly with OpenCV
@@ -34,7 +56,9 @@ cv::Mat FaceReconitionHandle::loadImage(const QString &path) {
 }
 void FaceReconitionHandle::initSourceImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence){
-    QWriteLocker locker(&rwLock);
+    ScopeSemaphoreExit streamGuard([this]() {
+        isImageChanged.store(false, std::memory_order_release);
+    });
     featureRemove();
     HResult ret;
     // Initialize the context
@@ -133,12 +157,14 @@ void FaceReconitionHandle::initSourceImageMap(std::map<QString,QString>&maps,int
     }
 }
 void FaceReconitionHandle::featureRemove(){
     if(customIds.size()>0){
         for(auto customId:customIds){
             HResult ret= HF_FeaturesGroupFeatureRemove(ctxHandle,customId);
             qDebug()<<"ret:featureRemove "<<ret;
         }
+        setImageChanged(false);
     }
 }
@@ -178,8 +204,10 @@ cv::Mat FaceReconitionHandle::loadImageFromByteStream(const QString& filePath) {
 void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
-    QReadLocker locker(&rwLock);
+    // record the start time in seconds so it matches the scheduler's currentSecsSinceEpoch() comparison
+    thread_time.store(QDateTime::currentSecsSinceEpoch(), std::memory_order_release);
+    ScopeSemaphoreExit streamGuard([this]() {
+        isRunning.store(false, std::memory_order_release);
+    });
     HResult ret;
     HF_ContextCustomParameter parameter = {0};
     HF_ImageData imageData = {0};
@@ -190,6 +218,8 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
     imageData.format = FORMAT_BGR;
     HImageHandle imageSteamHandle;
     ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
     if (ret != HSUCCEED) {
         qInfo()<<QString("image handle error:%1").arg((long) imageSteamHandle,0,10);
......
@@ -5,8 +5,8 @@
 #include "herror.h"
 #include "LogHandle.h"
 #include "VidesData.h"
+#include "ScopeSemaphoreExit.h"
 #include <opencv2/opencv.hpp>
-#include <QReadWriteLock>
 #include<QCoreApplication>
@@ -19,7 +19,11 @@ private:
     std::vector<int32_t>customIds;
-    QReadWriteLock rwLock;
+    std::atomic<qint64> thread_time{0};   // initialized so a never-run handle reports epoch 0 and is scheduled first
+    std::atomic<bool> isRunning{false};
+    std::atomic<bool> isImageChanged{false};
 public:
     FaceReconitionHandle();
     ~FaceReconitionHandle();
@@ -27,6 +31,15 @@ public:
     cv::Mat loadImage(const QString &path);
     cv::Mat loadImageFromByteStream(const QString& filePath);
+    qint64 getThreadTime() const;
+    bool getIsRunning() const;
+    void setIsRunning(bool running);
+    bool getImageChanged()const;
+    void setImageChanged(bool imageChanged);
     void doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face);
......
@@ -46,13 +46,16 @@ MainWindow::MainWindow()
     float heightReference=qSetting->value("devices/height_reference").toFloat();
     int uniformColor=qSetting->value("devices/uniformColor").toInt();
     int humanDetectionLen=qSetting->value("devices/humanDetectionLen").toInt();
+    int licensePlateLen=qSetting->value("devices/licensePlateLen").toInt();
+    int faceLen=qSetting->value("devices/faceLen").toInt();
     float carShapeConfidence=qSetting->value("devices/carShapeConfidence").toFloat();
     Common & instace= Common::getInstance();
-    instace.setHumanDetectionLen(humanDetectionLen);
+    instace.setTaskManage(humanDetectionLen,licensePlateLen,faceLen);
     float carConfidence=qSetting->value("devices/carConfidence").toFloat();
-    AlgorithmTaskManage &algorithmTaskManage= AlgorithmTaskManage::getInstance(humanDetectionLen);
+    AlgorithmTaskManage &algorithmTaskManage= AlgorithmTaskManage::getInstance(humanDetectionLen,licensePlateLen,faceLen);
     algorithmTaskManage.initHumanDetectionManage(modelPaths,carShapeConfidence,heightReference,uniformColor);
     algorithmTaskManage.initLicensePlateManage(modelPaths,carConfidence);
     QString httpurl;
@@ -83,7 +86,11 @@ MainWindow::MainWindow()
     },Qt::QueuedConnection);
     this->startCamera(httpurl);
-    batchUpdatesCameraImageMap();
+    float confidence=qSetting->value("devices/confidence").toFloat();
+    int faceNumbers=qSetting->value("devices/faceNumbers").toInt();
+    algorithmTaskManage.initFaceReconitionHandle(localImageMap,faceNumbers,confidence);
     // Set the timer interval
     dePermissionSynTimer->setInterval(dePermissionTimer);
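Note: the new pool sizes and face parameters are read from the same QSettings [devices] group as the existing keys. A sample ini fragment is shown below for reference; the values are illustrative only and are not taken from the commit:

[devices]
humanDetectionLen=2   ; size of the human-detection pool (existing key)
licensePlateLen=2     ; size of the license-plate pool (new key)
faceLen=2             ; size of the face-recognition pool (new key)
confidence=0.75       ; passed to initFaceReconitionHandle as confidence
faceNumbers=5         ; passed to initFaceReconitionHandle as numberFaces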
@@ -292,19 +299,27 @@ void MainWindow::updateLocalFace(const QString &httpurl) {
     }
     if (isChanged) {
+        int humalen=instance.getHumanDetectionLen();
+        int licensePlateLen=instance.getLicensePlateLen();
+        int facelen=instance.getFaceLen();
+        float confidence=qSetting->value("devices/confidence").toFloat();
+        int faceNumbers=qSetting->value("devices/faceNumbers").toInt();
+        AlgorithmTaskManage &algorithmTaskManage= AlgorithmTaskManage::getInstance(humalen,licensePlateLen,facelen);
         if (cloudImageMap.empty()) {
             // If the cloud map is now empty, remove all features
             //faceRecognition.featureRemove();
-            batchUpdatesFeatureRemove();
+            algorithmTaskManage.modifyImageFeature(localImageMap,faceNumbers,confidence,true);
         } else {
             //float confidence=qSetting->value("devices/confidence").toFloat();
             //int faceNumbers=qSetting->value("devices/faceNumbers").toInt();
             qDebug()<<"startMap != endMap-->";
             // faceRecognition.initSourceImageMap(localImageMap,faceNumbers, confidence);
-            batchUpdatesCameraImageMap();
+            algorithmTaskManage.modifyImageFeature(localImageMap,faceNumbers,confidence,false);
         }
     }
     for (vides_data::responseFaceReconition* data : datas)
     {
         instance.deleteObj(data);
@@ -312,26 +327,6 @@ void MainWindow::updateLocalFace(const QString &httpurl) {
     datas.clear(); // clear the list
     instance.deleteObj(res);
 }
-void MainWindow::batchUpdatesCameraImageMap(){
-    float confidence=qSetting->value("devices/confidence").toFloat();
-    int faceNumbers=qSetting->value("devices/faceNumbers").toInt();
-    for(auto iter = faceDetectionParkingPushs.begin(); iter != faceDetectionParkingPushs.end(); ++iter) {
-        CameraHandle*value= iter->second;
-        if(localImageMap.size()>0){
-            value->notificationUpdateImageMap(localImageMap,faceNumbers,confidence);
-        }
-    }
-}
-void MainWindow::batchUpdatesFeatureRemove(){
-    for(auto iter = faceDetectionParkingPushs.begin(); iter != faceDetectionParkingPushs.end(); ++iter) {
-        CameraHandle*value= iter->second;
-        if(localImageMap.size()>0){
-            value->featureRemove();
-        }
-    }
-}
 void MainWindow::findLocalSerialNumber(QString &serialNumber){
     if(vides_data::isVirtualMachine()){
         serialNumber = QSysInfo::machineUniqueId();
......
@@ -77,10 +77,6 @@ public:
     void deleteCloudNotCamer (const std::map<QString,vides_data::localDeviceStatus*>& localDevices,
                               const std::list<vides_data::responseDeviceStatus>& devices);
-    void batchUpdatesCameraImageMap();
-    void batchUpdatesFeatureRemove();
     ~MainWindow();
 signals:
......