Commit c2bb647b by "liusq"

Add thread reuse for license plate and human detection

parent 6b53f770
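For context, the commit replaces per-call detector usage with fixed pools managed by a new AlgorithmTaskManage class: a caller acquires a semaphore slot, the scheduler hands back the instance that has been idle the longest (with a random choice on ties), and the slot is released on scope exit. The following is a minimal, self-contained sketch of that pattern in standard C++; the names (Worker, WorkerPool) are illustrative only and are not part of this codebase.

// Sketch of the pool + longest-idle-first scheduling pattern introduced here.
// The real classes are HumanDetection / LicensePlateRecognition managed by
// AlgorithmTaskManage; this is an assumption-labeled illustration, not the
// project's actual API.
#include <atomic>
#include <chrono>
#include <condition_variable>
#include <cstdint>
#include <mutex>
#include <random>
#include <vector>

struct Worker {
    std::atomic<bool> running{false};
    std::atomic<int64_t> lastRunMs{0};   // timestamp of the last execution
    void run() { /* expensive detection work would happen here */ }
};

class WorkerPool {
public:
    explicit WorkerPool(int n) : workers_(n), slots_(n) {}

    // Acquire a slot, pick the worker idle the longest (random tie-break),
    // run it, then release the slot. The real code uses a RAII guard
    // (ScopeSemaphoreExit) so the release also happens on early returns.
    void execute() {
        acquireSlot();
        Worker* w = nullptr;
        {
            std::lock_guard<std::mutex> lock(mtx_);
            w = pickLongestIdle();
            if (w) w->running.store(true, std::memory_order_release);
        }
        if (w) {
            w->lastRunMs.store(nowMs(), std::memory_order_release);
            w->run();
            w->running.store(false, std::memory_order_release);
        }
        releaseSlot();
    }

private:
    Worker* pickLongestIdle() {
        const int64_t now = nowMs();
        int64_t maxWait = -1;
        std::vector<Worker*> candidates;
        for (Worker& w : workers_) {
            if (w.running.load(std::memory_order_acquire)) continue;
            const int64_t wait = now - w.lastRunMs.load(std::memory_order_acquire);
            if (wait > maxWait) { candidates.assign(1, &w); maxWait = wait; }
            else if (wait == maxWait) { candidates.push_back(&w); }
        }
        if (candidates.empty()) return nullptr;
        std::uniform_int_distribution<size_t> dis(0, candidates.size() - 1);
        return candidates[dis(rng_)];
    }

    static int64_t nowMs() {
        using namespace std::chrono;
        return duration_cast<milliseconds>(steady_clock::now().time_since_epoch()).count();
    }

    // Counting-semaphore substitute so the sketch also builds pre-C++20.
    void acquireSlot() {
        std::unique_lock<std::mutex> lk(slotMtx_);
        cv_.wait(lk, [this] { return slots_ > 0; });
        --slots_;
    }
    void releaseSlot() {
        { std::lock_guard<std::mutex> lk(slotMtx_); ++slots_; }
        cv_.notify_one();
    }

    std::vector<Worker> workers_;
    std::mutex mtx_;
    std::mutex slotMtx_;
    std::condition_variable cv_;
    int slots_;
    std::mt19937 rng_{std::random_device{}()};
};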
#include "HumanDetectionManage.h" #include "AlgorithmTaskManage.h"
HumanDetectionManage::HumanDetectionManage(int humanDetectionLen):semaphore(humanDetectionLen){ AlgorithmTaskManage::AlgorithmTaskManage(int humanDetectionLen):semaphore(humanDetectionLen),
plateSemaphore(humanDetectionLen){
this->humanDetectionLen=humanDetectionLen; this->humanDetectionLen=humanDetectionLen;
} }
void HumanDetectionManage::initHumanDetectionManage(const QString &modelPaths, void AlgorithmTaskManage::initHumanDetectionManage(const QString &modelPaths,
float carShapeConfidence,float &height_reference,int &uniformColor) { float carShapeConfidence,float &height_reference,int &uniformColor) {
for (int i = 0; i < humanDetectionLen; ++i) { for (int i = 0; i < humanDetectionLen; ++i) {
HumanDetection* human=new HumanDetection(modelPaths,carShapeConfidence); HumanDetection* human=new HumanDetection(modelPaths,carShapeConfidence);
human->setHuManParameter(height_reference,uniformColor); human->setHuManParameter(height_reference,uniformColor);
humanDetections.emplace_back(human); humanDetections.emplace_back(human);
} }
} }
HumanDetectionManage::~HumanDetectionManage(){ void AlgorithmTaskManage::initLicensePlateManage(const QString &modelPaths,
float carConfidence){
for (int i = 0; i < humanDetectionLen; ++i) {
LicensePlateRecognition* licensePlateRecognition=new LicensePlateRecognition(modelPaths,carConfidence);
licensePlateRecognitions.emplace_back(licensePlateRecognition);
}
}
AlgorithmTaskManage::~AlgorithmTaskManage(){
Common & instace= Common::getInstance(); Common & instace= Common::getInstance();
for (auto task:humanDetections) { for (auto task:humanDetections) {
instace.deleteObj(task); instace.deleteObj(task);
} }
for (auto plate:licensePlateRecognitions) {
instace.deleteObj(plate);
}
} }
HumanDetection* HumanDetectionManage::schedulingAlgorithm(QString sSn) { void* AlgorithmTaskManage::schedulingAlgorithm(int scheType) {
// 获取当前时间作为基准 if (scheType == 0x01) {
qint64 currentTime = QDateTime::currentSecsSinceEpoch(); std::lock_guard<std::mutex> lock(mtxHuman);
// 获取当前时间作为基准
qint64 currentTime = QDateTime::currentSecsSinceEpoch();
qint64 maxWaitTime = 0;
// 记录最大等待时间的对象数量
int maxWaitTimeCount = 0;
std::vector<HumanDetection*> schedulableObjects;
// 遍历humanDetections,找到所有等待时间相同的未执行的HumanDetection对象
for (HumanDetection* human : humanDetections) {
if (human->getIsRunning()) continue;
// 创建一个vector来存储所有可调度的对象 // 计算此对象自上次执行以来的等待时间
std::vector<HumanDetection*> schedulableObjects; qint64 waitTime = std::abs(currentTime - human->getThreadTime());
if (waitTime > maxWaitTime) {
schedulableObjects.clear();
schedulableObjects.push_back(human);
maxWaitTime = waitTime;
maxWaitTimeCount = 1;
} else if (waitTime == maxWaitTime) {
schedulableObjects.push_back(human);
maxWaitTimeCount++;
}
}
// 如果最大等待时间的对象数量为1,直接返回
if (maxWaitTimeCount == 1) {
return schedulableObjects.at(0);
}
if (schedulableObjects.empty()) {
return nullptr; // 如果没有可调度对象,返回 nullptr 或进行适当处理
}
// 在可调度的对象中随机选择一个
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
qint64 maxWaitTime = 0; return schedulableObjects[dis(gen)];
} else if (scheType == 0x02) {
std::lock_guard<std::mutex> lock(mtxLicense);
// 获取当前时间作为基准
qint64 currentTime = QDateTime::currentSecsSinceEpoch();
// 记录最大等待时间的对象数量 qint64 maxWaitTime = 0;
int maxWaitTimeCount = 0; // 记录最大等待时间的对象数量
int maxWaitTimeCount = 0;
std::vector<LicensePlateRecognition*> schedulableObjects;
// 遍历humanDetections,找到所有等待时间相同的未执行的HumanDetection对象 // 遍历licensePlateRecognitions,找到所有等待时间相同的未执行的LicensePlateRecognition对象
for (HumanDetection* human : humanDetections) { for (LicensePlateRecognition* licensePlateRecognition : licensePlateRecognitions) {
if (human->getIsRunning()) continue; if (licensePlateRecognition->getIsRunning()) continue;
// 计算此对象自上次执行以来的等待时间 // 计算此对象自上次执行以来的等待时间
qint64 waitTime = std::abs(currentTime - human->getThreadTime()); qint64 waitTime = std::abs(currentTime - licensePlateRecognition->getThreadTime());
if (waitTime > maxWaitTime) { if (waitTime > maxWaitTime) {
schedulableObjects.clear(); schedulableObjects.clear();
schedulableObjects.push_back(human); schedulableObjects.push_back(licensePlateRecognition);
maxWaitTime = waitTime; maxWaitTime = waitTime;
maxWaitTimeCount = 1; maxWaitTimeCount = 1;
} else if (waitTime == maxWaitTime) { } else if (waitTime == maxWaitTime) {
schedulableObjects.push_back(human); schedulableObjects.push_back(licensePlateRecognition);
maxWaitTimeCount++; maxWaitTimeCount++;
}
} }
} // 如果最大等待时间的对象数量为1,直接返回
// 如果最大等待时间的对象数量为1,直接返回 if (maxWaitTimeCount == 1) {
if (maxWaitTimeCount == 1) { return schedulableObjects.at(0);
return schedulableObjects.at(0); }
} if (schedulableObjects.empty()) {
if (schedulableObjects.empty()) { return nullptr; // 如果没有可调度对象,返回 nullptr 或进行适当处理
return nullptr; // 如果没有可调度对象,返回 nullptr 或进行适当处理 }
} // 在可调度的对象中随机选择一个
// 在可调度的对象中随机选择一个 std::random_device rd;
std::random_device rd; std::mt19937 gen(rd());
std::mt19937 gen(rd()); std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
return schedulableObjects[dis(gen)]; return schedulableObjects[dis(gen)];
} else {
qInfo() << "参数错误";
return nullptr;
}
} }
int HumanDetectionManage::executeFindHuManCar(const cv::Mat &source, int res, void AlgorithmTaskManage::executeFindlicensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
std::vector<vides_data::ParkingArea> &currentPlate,std::map<int,int>&resMap,QString sSn) { qint64 currentTime,QString sSn){
plateSemaphore.acquire();
ScopeSemaphoreExit guard([this]() {
plateSemaphore.release(); // 释放信号量
});
auto* selectedLicensePlate = static_cast<LicensePlateRecognition*>(schedulingAlgorithm(0x02));
if (selectedLicensePlate!=nullptr) {
selectedLicensePlate->setIsRunning(true);
// 调用选定对象的findHuManCar函数
qInfo() << "车牌调度算法抢到===>sn"<<sSn<<selectedLicensePlate;
selectedLicensePlate->licensePlateNumber(source, lpNumber,plate, currentTime);
} else {
qDebug() << "没有可用的LicensePlateRecognition对象可以调度";
return ;
}
}
int AlgorithmTaskManage::executeFindHuManCar(const cv::Mat &source, int res,
std::vector<vides_data::ParkingArea> &currentPlate,std::map<int,int>&resMap,QString sSn) {
semaphore.acquire(); semaphore.acquire();
ScopeSemaphoreExit guard([this]() { ScopeSemaphoreExit guard([this]() {
semaphore.release(); // 释放信号量 semaphore.release(); // 释放信号量
}); });
HumanDetection* selectedHumanDetection = schedulingAlgorithm(sSn); auto* selectedHumanDetection = static_cast<HumanDetection*>(schedulingAlgorithm(0x01));
if (selectedHumanDetection!=nullptr) { if (selectedHumanDetection!=nullptr) {
selectedHumanDetection->setIsRunning(true);
// 调用选定对象的findHuManCar函数 // 调用选定对象的findHuManCar函数
qInfo() << "调度算法抢到===>sn"<<sSn<<"res"<<res; qInfo() << "人形调度算法抢到===>sn"<<sSn<<"res"<<selectedHumanDetection;
int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, currentPlate); int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, currentPlate);
return detectionResult; return detectionResult;
} else { } else {
......
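Both execute* methods above rely on ScopeSemaphoreExit so the semaphore is released on every exit path, including early returns. As a rough illustration of that idiom (a sketch only; the actual ScopeSemaphoreExit class in this repository may differ in interface), a minimal scope guard looks like this:

// Hedged sketch of a scope-exit guard in the spirit of ScopeSemaphoreExit:
// it stores a callable and invokes it exactly once when the guard goes out
// of scope, so cleanup runs even on early returns or exceptions.
#include <functional>
#include <utility>

class ScopeExit {
public:
    explicit ScopeExit(std::function<void()> fn) : fn_(std::move(fn)) {}
    ~ScopeExit() { if (fn_) fn_(); }
    ScopeExit(const ScopeExit&) = delete;
    ScopeExit& operator=(const ScopeExit&) = delete;
private:
    std::function<void()> fn_;
};

// Usage mirrors the pattern above (assumed names):
//   semaphore.acquire();
//   ScopeExit guard([&] { semaphore.release(); });
//   ... early returns are now safe ...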
@@ -4,42 +4,55 @@
#include "Common.h"
#include "VidesData.h"
#include "ScopeSemaphoreExit.h"
#include "LicensePlateRecognition.h"
#include <QThread>
#include <random>
#include <QSemaphore>
#include <vector>
#include <opencv2/opencv.hpp>
#include <mutex>

class AlgorithmTaskManage{
public:
    AlgorithmTaskManage(int humanDetectionLen);

    ~AlgorithmTaskManage();

    static AlgorithmTaskManage& getInstance(int humanDetectionLen)
    {
        static AlgorithmTaskManage instance(humanDetectionLen);
        return instance;
    }

    void initHumanDetectionManage(const QString &modelPaths,
                                  float carShapeConfidence,float &height_reference,int &uniformColor);

    void initLicensePlateManage(const QString &modelPaths,
                                float carConfidence);

    int executeFindHuManCar(const cv::Mat &source,int res,std::vector<vides_data::ParkingArea> &currentPlate,
                            std::map<int,int>&resMap, QString sSn);

    void *schedulingAlgorithm(int scheType);

    void executeFindlicensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
                                       qint64 currentTime,QString sSn);

private:
    static AlgorithmTaskManage* instance;

    std::vector<HumanDetection*>humanDetections;

    std::vector<LicensePlateRecognition*>licensePlateRecognitions;

    int humanDetectionLen;

    QSemaphore semaphore;

    QSemaphore plateSemaphore;

    std::mutex mtxHuman;

    std::mutex mtxLicense;
};
......
#include "CameraHandle.h" #include "CameraHandle.h"
#include "TaskRunnable.h" #include "TaskRunnable.h"
#include "HumanDetectionManage.h" #include "AlgorithmTaskManage.h"
#include "ScopeSemaphoreExit.h" #include "ScopeSemaphoreExit.h"
#include <QElapsedTimer> #include <QElapsedTimer>
#include <QRegularExpression> #include <QRegularExpression>
...@@ -447,11 +447,11 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma ...@@ -447,11 +447,11 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
if(newInfo.getLicensePlate().length()<=0){ if(newInfo.getLicensePlate().length()<=0){
Common & instace= Common::getInstance(); Common & instace= Common::getInstance();
int humanLen=instace.getHumanDetectionLen(); int humanLen=instace.getHumanDetectionLen();
HumanDetectionManage &humanDetectionManage=HumanDetectionManage::getInstance(humanLen); AlgorithmTaskManage &algorithmTaskManage=AlgorithmTaskManage::getInstance(humanLen);
std::vector<vides_data::ParkingArea> currentPlates; std::vector<vides_data::ParkingArea> currentPlates;
std::map<int,int>resMap; std::map<int,int>resMap;
int car_size =humanDetectionManage.executeFindHuManCar(frame,0x01,currentPlates,resMap,sSn); int car_size =algorithmTaskManage.executeFindHuManCar(frame,0x01,currentPlates,resMap,sSn);
qDebug()<<sSn<<":"<<"当前车形数量:"<<car_size; qDebug()<<sSn<<":"<<"当前车形数量:"<<car_size;
if (car_size <= 0 && car_size!=-2) { if (car_size <= 0 && car_size!=-2) {
@@ -547,8 +547,9 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
    int height = frame.rows; // Image height
    int humanlen=instace.getHumanDetectionLen();
    qDebug()<<"frame 宽度:"<<width<<"frame 高度:"<<height;
    AlgorithmTaskManage &algorithmTaskManage=AlgorithmTaskManage::getInstance(humanlen);

    std::map<QString,vides_data::requestFaceReconition> mapFaces;
@@ -565,22 +566,24 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
    // Work-uniform detection
    if ((algorithmPermissions & 0x01<<2) != 0) {
        uniforms=algorithmTaskManage.executeFindHuManCar(frame,0x02,currentPlates,resMap,sSn);
        if(currentPlates.size()>0x00){
            faSize=resMap.at(0x00);
            uniforms=resMap.at(0x02);
        }
    }else{
        // Human detection
        if((algorithmPermissions & 0x01<<1) != 0){
            uniforms=algorithmTaskManage.executeFindHuManCar(frame,0x00,currentPlates,resMap,sSn);
            if(currentPlates.size()>0x00){
                faSize=resMap.at(0x00);
                uniforms=faSize;
            }
        }
    }
    qint64 elapsedTime = timer.elapsed();
    qInfo()<<"人脸数量==>"<<faSize;
    qInfo()<<"未穿工服数量==>"<<uniforms;
    qInfo() << "humanDetectionManage.executeFindHuManCa:执行时间"<<elapsedTime / 1000;
@@ -595,6 +598,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
    if ((algorithmPermissions & 0x01<<2) != 0) {
        worker = (faSize - uniforms > 0) ? (faSize - uniforms) : 0;
    }
    qInfo()<<"工作人数==>"<<worker;
    vides_data::response* resp=httpService.httpPostFacePopulation(imgs,faSize,worker,sSn,currentTime);
    if (resp->code!= 0) {
        qInfo()<<"人数变化推送信息推送失败";
@@ -611,6 +615,8 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
    qDebug() << "faceRecognition.doesItExistEmployee Current thread ID: " << QThread::currentThreadId()<<sSn;
    std::list<vides_data::faceRecognitionResult>faces;
    faceReconitionHandle->doesItExistEmployee(frame,faces);
    qInfo()<<"识别的人脸数量==>"<<faces.size();

    if (faces.size()>0) {
        for(auto face:faces){
            vides_data::requestFaceReconition faceReconition;
@@ -706,7 +712,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
    if(countValue==0 ){
        vides_data::requestLicensePlate initPlate;
        initPlate.sn=sSn;
        algorithmTaskManage.executeFindlicensePlateNumber(frame, lpNumber,initPlate,currentTime,sSn);
        if(initPlate.plates.size()==0){
            batchRegionalPushLicensePlate(imgs,currentTime,initPlate);
            if(initPlate.plates.size()>0){
@@ -730,9 +736,9 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
    }
    vides_data::requestLicensePlate resultPlate;
    resultPlate.sn=sSn;
    algorithmTaskManage.executeFindlicensePlateNumber(areaMat, lpNumber,resultPlate,currentTime,sSn);
    std::list<vides_data::LicensePlate>ps =resultPlate.plates;
    qDebug()<<QString("sn==>%1,区域:%2识别的车牌信息是:%3").arg(sSn).arg(key).
              arg(lpNumber);
@@ -764,7 +770,9 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
            continue;
        }
        vides_data::LicensePlate maxPlate;
        LicensePlateRecognition licensePlateRecognitionNew;
        licensePlateRecognitionNew.filterLicensePlateConfidenceMax(resultPlate,maxPlate);

        RecognizedInfo recognizedInfo;
        if (maxPlate.new_color=="蓝牌" && maxPlate.new_plate.length() != 7) {
......
@@ -76,16 +76,19 @@ void HumanDetection::setHuManParameter(float &height_reference,int &uniformColor
}

qint64 HumanDetection::getThreadTime()const{
    return thread_time.load(std::memory_order_acquire);
}

bool HumanDetection::getIsRunning()const{
    return isRunning.load(std::memory_order_acquire);
}

void HumanDetection::setIsRunning(bool running) {
    this->isRunning.store(running, std::memory_order_acquire);
}

// 0 human, 1 vehicle, 2 work uniform
int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int>&reMap, std::vector<vides_data::ParkingArea> &currentPlate) {
    thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_acquire);

    TCV_CameraStream *stream = TCV_CreateCameraStream();
@@ -94,7 +97,7 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
        // Release the camera stream
        TCV_ReleaseCameraStream(stream);
        isRunning.store(false, std::memory_order_acquire);
    });
@@ -109,6 +112,8 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
    if (res == 0x00 || res == 0x02) {
        num = TCV_HumanDetectorGetNumOfHuman(detector);
        qInfo()<<"TCV_HumanDetectorGetNumOfHuman==>"<<num;
        if (num == 0) return num; // No pedestrian detected; return early
        std::vector<TCV_ObjectLocation> results(num);
        TCV_HumanDetectorGetHumanLocation(detector, results.data(), num);
@@ -120,6 +125,7 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
        int tenPlace = uniformColor / 10; // tens digit
        int onePlace = uniformColor % 10; // ones digit

        if (std::abs(person.y2 - person.y1) >= heightReference) {
            vides_data::ParkingArea area;
            area.topLeftCornerX=person.x1;
            area.topLeftCornerY=person.y1;
@@ -140,8 +146,8 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
        }
        reMap[0x02] = count_no_uniform; // Number of pedestrians not wearing a work uniform
        reMap[0x00] = count_all;        // Number of all pedestrians meeting the criteria
        num = (res == 0x00) ? count_all : count_no_uniform;
        qDebug() << (res == 0 ? "findHuManCar 检测到的人数:" : "findHuManCar 未穿工服的人数:") << num;
    }
    else if (res == 0x01) {
        num = TCV_HumanDetectorGetNumOfCar(detector);
......
@@ -27,7 +27,8 @@ public:
    bool getIsRunning() const;

    void setIsRunning(bool running);
......
#include "Common.h" #include "Common.h"
#include "LicensePlateRecognition.h" #include "LicensePlateRecognition.h"
#include "ScopeSemaphoreExit.h"
LicensePlateRecognition::LicensePlateRecognition() {} LicensePlateRecognition::LicensePlateRecognition(const QString &modelPaths, float carConfidence) {
HLPR_ContextConfiguration configuration = {0};
QByteArray && by_mpath=modelPaths.toUtf8();
char* m_path=by_mpath.data();
configuration.models_path = m_path;
configuration.max_num = 5;
configuration.det_level = DETECT_LEVEL_LOW;
configuration.use_half = false;
configuration.nms_threshold = 0.5f;
configuration.rec_confidence_threshold = carConfidence;
configuration.box_conf_threshold = 0.30f;
configuration.threads = 1;
ctx = HLPR_CreateContext(&configuration);
}
LicensePlateRecognition::LicensePlateRecognition(){
}
LicensePlateRecognition::~LicensePlateRecognition(){ LicensePlateRecognition::~LicensePlateRecognition(){
HLPR_ReleaseContext(ctx); HLPR_ReleaseContext(ctx);
} }
LicensePlateRecognition* LicensePlateRecognition::instance = nullptr;
void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const QString &modelPaths,QString & lpNumber){ void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const QString &modelPaths,QString & lpNumber){
HLPR_ImageData data = {0}; HLPR_ImageData data = {0};
data.data = source.data; data.data = source.data;
...@@ -96,27 +112,22 @@ void LicensePlateRecognition::filterLicensePlateConfidenceMax(vides_data::reques ...@@ -96,27 +112,22 @@ void LicensePlateRecognition::filterLicensePlateConfidenceMax(vides_data::reques
} }
} }
} }
qint64 LicensePlateRecognition::getThreadTime()const{
return thread_time.load(std::memory_order_acquire);
}
void LicensePlateRecognition::initHlprContext(const QString &modelPaths, float carConfidence){ bool LicensePlateRecognition::getIsRunning()const{
if(ctx==nullptr){ return isRunning.load(std::memory_order_acquire);
HLPR_ContextConfiguration configuration = {0};
QByteArray && by_mpath=modelPaths.toUtf8();
char* m_path=by_mpath.data();
configuration.models_path = m_path;
configuration.max_num = 5;
configuration.det_level = DETECT_LEVEL_LOW;
configuration.use_half = false;
configuration.nms_threshold = 0.5f;
configuration.rec_confidence_threshold = carConfidence;
configuration.box_conf_threshold = 0.30f;
configuration.threads = 1;
ctx = HLPR_CreateContext(&configuration);
}
} }
void LicensePlateRecognition::setIsRunning(bool running) {
this->isRunning.store(running, std::memory_order_acquire);
}
void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate, void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime) { qint64 currentTime) {
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_acquire);
// 执行一帧图像数据检测行人 // 执行一帧图像数据检测行人
// create ImageData // create ImageData
...@@ -128,7 +139,11 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString ...@@ -128,7 +139,11 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
data.rotation = CAMERA_ROTATION_0; data.rotation = CAMERA_ROTATION_0;
// create DataBuffer // create DataBuffer
P_HLPR_DataBuffer buffer = HLPR_CreateDataBuffer(&data); P_HLPR_DataBuffer buffer = HLPR_CreateDataBuffer(&data);
ScopeSemaphoreExit streamGuard([this, buffer]() {
isRunning.store(false, std::memory_order_acquire);
HLPR_ReleaseDataBuffer(buffer);
});
HREESULT ret = HLPR_ContextQueryStatus(ctx); HREESULT ret = HLPR_ContextQueryStatus(ctx);
if (ret != HResultCode::Ok) { if (ret != HResultCode::Ok) {
qInfo()<<QString("create error"); qInfo()<<QString("create error");
...@@ -139,7 +154,6 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString ...@@ -139,7 +154,6 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
HLPR_ContextUpdateStream(ctx, buffer, &results); HLPR_ContextUpdateStream(ctx, buffer, &results);
if (results.plate_size <= 0) { if (results.plate_size <= 0) {
HLPR_ReleaseDataBuffer(buffer);
return; return;
} }
...@@ -180,8 +194,7 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString ...@@ -180,8 +194,7 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
} }
lpNumber =lpResults.join("\t"); lpNumber =lpResults.join("\t");
HLPR_ReleaseDataBuffer(buffer);
} }
@@ -8,19 +8,15 @@
#include <QTextStream>
#include <QFile>
#include <QImage>
#include <QRegularExpression>
#include <atomic>

const std::vector<std::string> types =
    {"蓝牌", "黄牌单层", "白牌单层", "绿牌新能源", "黑牌港澳",
     "香港单层", "香港双层", "澳门单层", "澳门双层", "黄牌双层"};

class LicensePlateRecognition{
public:
    // Recognize license plate numbers
    void licensePlateNumber(const cv::Mat &source,QString & lpNumber, vides_data::requestLicensePlate &plate,
                            qint64 currentTime);
@@ -28,19 +24,30 @@ public:
    void filterLicensePlateConfidenceMax(vides_data::requestLicensePlate &plate,vides_data::LicensePlate &max);

    void oldLicensePlateNumber(const cv::Mat &source,const QString &modelPaths,QString & lpNumber);

    void replaceWith1And0( QString &code);

    LicensePlateRecognition(const QString &modelPaths,float carConfidence);

    LicensePlateRecognition();

    ~LicensePlateRecognition();

    qint64 getThreadTime() const;

    bool getIsRunning() const;

    void setIsRunning(bool running);

private:
    P_HLPR_Context ctx=nullptr ;

    std::atomic<qint64> thread_time;

    std::atomic<bool> isRunning{false};
};
......
@@ -97,7 +97,7 @@ SOURCES += \
    HumanDetection.cpp \
    ScopeSemaphoreExit.cpp \
    FaceReconitionHandle.cpp \
    AlgorithmTaskManage.cpp

HEADERS += \
    Common.h \
@@ -115,7 +115,7 @@ HEADERS += \
    HumanDetection.h \
    ScopeSemaphoreExit.h \
    FaceReconitionHandle.h \
    AlgorithmTaskManage.h

#FORMS += \
#    mainwindow.ui
......
@@ -52,11 +52,9 @@ MainWindow::MainWindow()
    float carConfidence=qSetting->value("devices/carConfidence").toFloat();

    AlgorithmTaskManage &algorithmTaskManage= AlgorithmTaskManage::getInstance(humanDetectionLen);
    algorithmTaskManage.initHumanDetectionManage(modelPaths,carShapeConfidence,heightReference,uniformColor);
    algorithmTaskManage.initLicensePlateManage(modelPaths,carConfidence);

    QString httpurl;
    QString profile=qSetting->value("cloudservice/profile","test").toString();
    if(strcmp(profile.toUtf8().data(),vides_data::PROFLIE_TEST)==0 ){
......
@@ -2,12 +2,11 @@
#define MAINWINDOW_H

#include "Common.h"
#include "CameraHandle.h"
#include "HttpService.h"
#include "VidesData.h"
#include "MediaFaceImage.h"
#include "AlgorithmTaskManage.h"
#include <algorithm>
#include <QString>
#include <QTextCodec>
......