Commit fe54e885 by “liusq”

调度算法修改为通用模版

parent 54c45fe9
...@@ -34,8 +34,6 @@ void AlgorithmTaskManage::initFaceReconitionHandle(std::map<QString,QString>&map ...@@ -34,8 +34,6 @@ void AlgorithmTaskManage::initFaceReconitionHandle(std::map<QString,QString>&map
} }
void AlgorithmTaskManage::modifyImageFeature(std::map<QString,QString>&maps,int numberFaces,float confidence,bool isNull){ void AlgorithmTaskManage::modifyImageFeature(std::map<QString,QString>&maps,int numberFaces,float confidence,bool isNull){
static int i=0;
printf("modifyImageFeature调用次数%d次\n", ++i);
std::lock_guard<std::mutex> lock(mtxFace); std::lock_guard<std::mutex> lock(mtxFace);
for (FaceReconitionHandle* face : faceReconitionHandles) { for (FaceReconitionHandle* face : faceReconitionHandles) {
face->setImageChanged(true); face->setImageChanged(true);
...@@ -63,128 +61,17 @@ AlgorithmTaskManage::~AlgorithmTaskManage(){ ...@@ -63,128 +61,17 @@ AlgorithmTaskManage::~AlgorithmTaskManage(){
void* AlgorithmTaskManage::schedulingAlgorithm(int scheType) { void* AlgorithmTaskManage::schedulingAlgorithm(int scheType) {
if (scheType == 0x01) { if (scheType == 0x01) {
std::lock_guard<std::mutex> lock(mtxHuman); return schedulingAlgorithmTemplate(humanDetections, mtxHuman);
// 获取当前时间作为基准
qint64 currentTime = QDateTime::currentSecsSinceEpoch();
qint64 maxWaitTime = 0;
// 记录最大等待时间的对象数量
int maxWaitTimeCount = 0;
std::vector<HumanDetection*> schedulableObjects;
// 遍历humanDetections,找到所有等待时间相同的未执行的HumanDetection对象
for (HumanDetection* human : humanDetections) {
if (human->getIsRunning()) continue;
// 计算此对象自上次执行以来的等待时间
qint64 waitTime = std::abs(currentTime - human->getThreadTime());
if (waitTime > maxWaitTime) {
schedulableObjects.clear();
schedulableObjects.push_back(human);
maxWaitTime = waitTime;
maxWaitTimeCount = 1;
} else if (waitTime == maxWaitTime) {
schedulableObjects.push_back(human);
maxWaitTimeCount++;
}
}
// 如果最大等待时间的对象数量为1,直接返回
if (maxWaitTimeCount == 1) {
return schedulableObjects.at(0);
}
if (schedulableObjects.empty()) {
return nullptr; // 如果没有可调度对象,返回 nullptr 或进行适当处理
}
// 在可调度的对象中随机选择一个
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
return schedulableObjects[dis(gen)];
} else if (scheType == 0x02) { } else if (scheType == 0x02) {
std::lock_guard<std::mutex> lock(mtxLicense); return schedulingAlgorithmTemplate(licensePlateRecognitions, mtxLicense);
// 获取当前时间作为基准 } else if (scheType == 0x03) {
qint64 currentTime = QDateTime::currentSecsSinceEpoch(); return schedulingAlgorithmTemplate(faceReconitionHandles, mtxFace);
} else {
qint64 maxWaitTime = 0;
// 记录最大等待时间的对象数量
int maxWaitTimeCount = 0;
std::vector<LicensePlateRecognition*> schedulableObjects;
// 遍历licensePlateRecognitions,找到所有等待时间相同的未执行的LicensePlateRecognition对象
for (LicensePlateRecognition* licensePlateRecognition : licensePlateRecognitions) {
if (licensePlateRecognition->getIsRunning()) continue;
// 计算此对象自上次执行以来的等待时间
qint64 waitTime = std::abs(currentTime - licensePlateRecognition->getThreadTime());
if (waitTime > maxWaitTime) {
schedulableObjects.clear();
schedulableObjects.push_back(licensePlateRecognition);
maxWaitTime = waitTime;
maxWaitTimeCount = 1;
} else if (waitTime == maxWaitTime) {
schedulableObjects.push_back(licensePlateRecognition);
maxWaitTimeCount++;
}
}
// 如果最大等待时间的对象数量为1,直接返回
if (maxWaitTimeCount == 1) {
return schedulableObjects.at(0);
}
if (schedulableObjects.empty()) {
return nullptr; // 如果没有可调度对象,返回 nullptr 或进行适当处理
}
// 在可调度的对象中随机选择一个
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
return schedulableObjects[dis(gen)];
}else if (scheType==0x03) {
std::lock_guard<std::mutex> lock(mtxFace);
// 获取当前时间作为基准
qint64 currentTime = QDateTime::currentSecsSinceEpoch();
qint64 maxWaitTime = 0;
// 记录最大等待时间的对象数量
int maxWaitTimeCount = 0;
std::vector<FaceReconitionHandle*> schedulableObjects;
// 遍历faceReconitionHandles,找到所有等待时间相同的未执行的FaceReconitionHandle对象
for (FaceReconitionHandle* face : faceReconitionHandles) {
if (face->getIsRunning()) continue;
// 计算此对象自上次执行以来的等待时间
qint64 waitTime = std::abs(currentTime - face->getThreadTime());
if (waitTime > maxWaitTime) {
schedulableObjects.clear();
schedulableObjects.push_back(face);
maxWaitTime = waitTime;
maxWaitTimeCount = 1;
} else if (waitTime == maxWaitTime) {
schedulableObjects.push_back(face);
maxWaitTimeCount++;
}
}
// 如果最大等待时间的对象数量为1,直接返回
if (maxWaitTimeCount == 1) {
return schedulableObjects.at(0);
}
if (schedulableObjects.empty()) {
return nullptr; // 如果没有可调度对象,返回 nullptr 或进行适当处理
}
// 在可调度的对象中随机选择一个
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
return schedulableObjects[dis(gen)];
}
else {
qInfo() << "参数错误"; qInfo() << "参数错误";
return nullptr; return nullptr;
} }
} }
void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces,QString sSn){ void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces,QString sSn){
faceSemaphore.acquire(); faceSemaphore.acquire();
ScopeSemaphoreExit guard([this]() { ScopeSemaphoreExit guard([this]() {
...@@ -197,7 +84,7 @@ void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,s ...@@ -197,7 +84,7 @@ void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,s
qInfo() << "人脸识别算法抢到===>sn"<<sSn<<selectedFaceReconition; qInfo() << "人脸识别算法抢到===>sn"<<sSn<<selectedFaceReconition;
selectedFaceReconition->doesItExistEmployee(source, faces); selectedFaceReconition->doesItExistEmployee(source, faces);
} else { } else {
qDebug() << "没有可用的LicensePlateRecognition对象可以调度"; qInfo() << "没有可用的LicensePlateRecognition对象可以调度";
return ; return ;
} }
} }
...@@ -216,7 +103,7 @@ void AlgorithmTaskManage::executeFindlicensePlateNumber(const cv::Mat &source, Q ...@@ -216,7 +103,7 @@ void AlgorithmTaskManage::executeFindlicensePlateNumber(const cv::Mat &source, Q
qInfo() << "车牌调度算法抢到===>sn"<<sSn<<selectedLicensePlate; qInfo() << "车牌调度算法抢到===>sn"<<sSn<<selectedLicensePlate;
selectedLicensePlate->licensePlateNumber(source, lpNumber,plate, currentTime); selectedLicensePlate->licensePlateNumber(source, lpNumber,plate, currentTime);
} else { } else {
qDebug() << "没有可用的LicensePlateRecognition对象可以调度"; qInfo() << "没有可用的LicensePlateRecognition对象可以调度";
return ; return ;
} }
} }
...@@ -236,7 +123,7 @@ int AlgorithmTaskManage::executeFindHuManCar(const cv::Mat &source, int res, ...@@ -236,7 +123,7 @@ int AlgorithmTaskManage::executeFindHuManCar(const cv::Mat &source, int res,
int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, currentPlate); int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, currentPlate);
return detectionResult; return detectionResult;
} else { } else {
qDebug() << "没有可用的HumanDetection对象可以调度"; qInfo() << "没有可用的HumanDetection对象可以调度";
return -2; return -2;
} }
} }
...@@ -32,6 +32,8 @@ public: ...@@ -32,6 +32,8 @@ public:
void *schedulingAlgorithm(int scheType); void *schedulingAlgorithm(int scheType);
int executeFindHuManCar(const cv::Mat &source,int res,std::vector<vides_data::ParkingArea> &currentPlate, int executeFindHuManCar(const cv::Mat &source,int res,std::vector<vides_data::ParkingArea> &currentPlate,
std::map<int,int>&resMap, QString sSn); std::map<int,int>&resMap, QString sSn);
...@@ -42,6 +44,42 @@ public: ...@@ -42,6 +44,42 @@ public:
private: private:
template<typename T>
T* schedulingAlgorithmTemplate(std::vector<T*>& objects, std::mutex& mtx) {
std::lock_guard<std::mutex> lock(mtx);
qint64 currentTime = QDateTime::currentSecsSinceEpoch();
qint64 maxWaitTime = 0;
int maxWaitTimeCount = 0;
std::vector<T*> schedulableObjects;
for (T* obj : objects) {
if (obj->getIsRunning()) continue;
qint64 waitTime = std::abs(currentTime - obj->getThreadTime());
if (waitTime > maxWaitTime) {
schedulableObjects.clear();
schedulableObjects.push_back(obj);
maxWaitTime = waitTime;
maxWaitTimeCount = 1;
} else if (waitTime == maxWaitTime) {
schedulableObjects.push_back(obj);
maxWaitTimeCount++;
}
}
if (maxWaitTimeCount == 1) {
return schedulableObjects.at(0);
}
if (schedulableObjects.empty()) {
return nullptr;
}
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
return schedulableObjects[dis(gen)];
}
static AlgorithmTaskManage* instance; static AlgorithmTaskManage* instance;
std::vector<HumanDetection*>humanDetections; std::vector<HumanDetection*>humanDetections;
......
...@@ -536,11 +536,11 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -536,11 +536,11 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
Common & instace= Common::getInstance(); Common & instace= Common::getInstance();
qDebug()<<"=============================>"; qInfo()<<"=============================>";
int width = frame.cols; // 获取图像宽度 int width = frame.cols; // 获取图像宽度
int height = frame.rows; // 获取图像高度 int height = frame.rows; // 获取图像高度
int humanlen=instace.getHumanDetectionLen(); int humanlen=instace.getHumanDetectionLen();
qDebug()<<"frame 宽度:"<<width<<"frame 高度:"<<height; qInfo()<<"frame 宽度:"<<width<<"frame 高度:"<<height;
int licensePlateLen=instace.getLicensePlateLen(); int licensePlateLen=instace.getLicensePlateLen();
int faceLen=instace.getFaceLen(); int faceLen=instace.getFaceLen();
...@@ -603,10 +603,9 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -603,10 +603,9 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
faceMapWorker.setY(uniforms); faceMapWorker.setY(uniforms);
} }
} }
QElapsedTimer facetime;
facetime.start();
if(faSize>0 && (algorithmPermissions & 0x01<<1) != 0){ if(faSize>0 && (algorithmPermissions & 0x01<<1) != 0){
qDebug() << "faceRecognition.doesItExistEmployee Current thread ID: " << QThread::currentThreadId()<<sSn; qInfo() << "faceRecognition.doesItExistEmployee Current thread ID: " << QThread::currentThreadId()<<sSn;
std::list<vides_data::faceRecognitionResult>faces; std::list<vides_data::faceRecognitionResult>faces;
algorithmTaskManage.executeFindDoesItExistEmployee(frame,faces,sSn); algorithmTaskManage.executeFindDoesItExistEmployee(frame,faces,sSn);
qInfo()<<"识别的人脸数量==>"<<faces.size(); qInfo()<<"识别的人脸数量==>"<<faces.size();
...@@ -640,9 +639,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -640,9 +639,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
} }
} }
} }
qint64 faceTime = facetime.elapsed();
qInfo() << "faceRecognition:执行时间"<<faceTime / 1000;
if ((algorithmPermissions & 0x01<<2) != 0) { if ((algorithmPermissions & 0x01<<2) != 0) {
if(uniforms>0 ){ if(uniforms>0 ){
//未穿工服的人数 //未穿工服的人数
...@@ -716,7 +713,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -716,7 +713,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
} }
faceCount.fetch_add(1, std::memory_order_relaxed); faceCount.fetch_add(1, std::memory_order_relaxed);
qDebug()<<"faceCount==>"<<faceCount.load(std::memory_order_relaxed); qInfo()<<"faceCount==>"<<faceCount.load(std::memory_order_relaxed);
for (auto it = areaMat.begin(); it != areaMat.end(); ++it) { for (auto it = areaMat.begin(); it != areaMat.end(); ++it) {
int key = it->first; int key = it->first;
cv::Mat areaMat = it->second; cv::Mat areaMat = it->second;
...@@ -725,7 +722,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -725,7 +722,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
if (parkAreaMap != parkMap.end()) { if (parkAreaMap != parkMap.end()) {
value = parkAreaMap->second; // 成功找到,获取 value = parkAreaMap->second; // 成功找到,获取
} else { } else {
qDebug()<<sSn<<"==>区域不存在:"<<key; qInfo()<<sSn<<"==>区域不存在:"<<key;
continue; continue;
} }
vides_data::requestLicensePlate resultPlate; vides_data::requestLicensePlate resultPlate;
...@@ -734,7 +731,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -734,7 +731,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
algorithmTaskManage.executeFindlicensePlateNumber(areaMat, lpNumber,resultPlate,currentTime,sSn); algorithmTaskManage.executeFindlicensePlateNumber(areaMat, lpNumber,resultPlate,currentTime,sSn);
std::list<vides_data::LicensePlate>ps =resultPlate.plates; std::list<vides_data::LicensePlate>ps =resultPlate.plates;
qDebug()<<QString("sn==>%1,区域:%2识别的车牌信息是:%3").arg(sSn).arg(key). qInfo()<<QString("sn==>%1,区域:%2识别的车牌信息是:%3").arg(sSn).arg(key).
arg(lpNumber); arg(lpNumber);
if(ps.size()==0){ if(ps.size()==0){
int res=-1; int res=-1;
...@@ -756,7 +753,6 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -756,7 +753,6 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
current.new_plate=recognizedInfo.getLicensePlate(); current.new_plate=recognizedInfo.getLicensePlate();
current.time=recognizedInfo.getRecognizeTime(); current.time=recognizedInfo.getRecognizeTime();
newPlate.plates.push_back(std::move(current)); newPlate.plates.push_back(std::move(current));
qDebug()<<QString("当前进入ps.size()==0是当前校验返回结果是:%1").arg(res);
} }
}else{ }else{
int res =-1; int res =-1;
...@@ -770,13 +766,13 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -770,13 +766,13 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
RecognizedInfo recognizedInfo; RecognizedInfo recognizedInfo;
if (maxPlate.new_color=="蓝牌" && maxPlate.new_plate.length() != 7) { if (maxPlate.new_color=="蓝牌" && maxPlate.new_plate.length() != 7) {
qDebug()<<sSn<<"==>蓝牌车牌号:"<<maxPlate.new_plate<<"===>recognition.new_plate.length():"<<maxPlate.new_plate.length(); qInfo()<<sSn<<"==>蓝牌车牌号:"<<maxPlate.new_plate<<"===>recognition.new_plate.length():"<<maxPlate.new_plate.length();
continue; continue;
} else if (maxPlate.new_color=="绿牌新能源" && maxPlate.new_plate.length() != 8) { } else if (maxPlate.new_color=="绿牌新能源" && maxPlate.new_plate.length() != 8) {
qDebug()<<sSn<<"==>绿牌车牌号:"<<maxPlate.new_plate<<"===>recognition.new_plate.length():"<<maxPlate.new_plate.length(); qInfo()<<sSn<<"==>绿牌车牌号:"<<maxPlate.new_plate<<"===>recognition.new_plate.length():"<<maxPlate.new_plate.length();
continue; continue;
} else if (maxPlate.new_plate.length() < 7) { } else if (maxPlate.new_plate.length() < 7) {
qDebug()<<sSn<<"==>非绿牌蓝牌车牌号:"<<maxPlate.new_plate<<"===>recognition.new_plate.length():"<<maxPlate.new_plate.length(); qInfo()<<sSn<<"==>非绿牌蓝牌车牌号:"<<maxPlate.new_plate<<"===>recognition.new_plate.length():"<<maxPlate.new_plate.length();
continue; continue;
} }
if(maxPlate.text_confidence>=instace.getCarConfidenceMax()){ if(maxPlate.text_confidence>=instace.getCarConfidenceMax()){
...@@ -793,7 +789,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -793,7 +789,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
this->checkAndUpdateCurrentPlate(value,areaMat,recognizedInfo,res); this->checkAndUpdateCurrentPlate(value,areaMat,recognizedInfo,res);
} }
if(maxPlate.text_confidence<=instace.getCarConfidenceMin()){ if(maxPlate.text_confidence<=instace.getCarConfidenceMin()){
qDebug()<<sSn<<"==>recognition.text_confidence<=instace.getCarConfidenceMin"<<instace.getCarConfidenceMin(); qInfo()<<sSn<<"==>recognition.text_confidence<=instace.getCarConfidenceMin"<<instace.getCarConfidenceMin();
continue; continue;
} }
if(maxPlate.text_confidence>instace.getCarConfidenceMin() if(maxPlate.text_confidence>instace.getCarConfidenceMin()
...@@ -807,13 +803,12 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -807,13 +803,12 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
recognizedInfo=std::move(info); recognizedInfo=std::move(info);
this->checkAndUpdateCurrentPlate(value,areaMat,recognizedInfo,res); this->checkAndUpdateCurrentPlate(value,areaMat,recognizedInfo,res);
} }
qDebug()<<sSn<<"==>checkAndUpdateCurrentPlate结果是"<<res;
if (res == Exit || res == Mobilization) { if (res == Exit || res == Mobilization) {
maxPlate.areaLocation=value->getArea(); maxPlate.areaLocation=value->getArea();
maxPlate.img=imgs; maxPlate.img=imgs;
maxPlate.new_color=recognizedInfo.getColor(); maxPlate.new_color=recognizedInfo.getColor();
newPlate.plates.push_back(std::move(maxPlate)); newPlate.plates.push_back(std::move(maxPlate));
qDebug()<<QString("当前进入ps.size()>0 --> res == Exit || res == Mobilization 是当前校验返回结果是:%1").arg(res); qInfo()<<QString("当前进入ps.size()>0 --> res == Exit || res == Mobilization 是当前校验返回结果是:%1").arg(res);
} }
if(res==ExitAndMobilization){ if(res==ExitAndMobilization){
maxPlate.areaLocation=value->getArea(); maxPlate.areaLocation=value->getArea();
...@@ -1070,24 +1065,6 @@ void CameraHandle::faceUniformOverlap(std::map<QString, vides_data::requestFaceR ...@@ -1070,24 +1065,6 @@ void CameraHandle::faceUniformOverlap(std::map<QString, vides_data::requestFaceR
std::list<QString>& outUniforms) { std::list<QString>& outUniforms) {
const float epsilon = 1e-5; const float epsilon = 1e-5;
for (auto iter = mapFaces.begin(); iter != mapFaces.end(); ++iter) {
QString id = iter->first; // 人员id
vides_data::requestFaceReconition value = iter->second;
std::vector<cv::Point2f> faceAreaPoints = {
cv::Point2f(value.area.top_left_corner_x, value.area.top_left_corner_y),
cv::Point2f(value.area.top_right_corner_x, value.area.top_right_corner_y),
cv::Point2f(value.area.bottom_right_corner_x, value.area.bottom_right_corner_y),
cv::Point2f(value.area.bottom_left_corner_x, value.area.bottom_left_corner_y)
};
if (!isClockwise(faceAreaPoints)) {
std::reverse(faceAreaPoints.begin(), faceAreaPoints.end());
}
float maxIntersectionArea = 0.0;
int maxUniformIndex = -1;
for (size_t i = 0; i < uniforms.size(); ++i) { for (size_t i = 0; i < uniforms.size(); ++i) {
std::vector<cv::Point2f> uniformAreaPoints = { std::vector<cv::Point2f> uniformAreaPoints = {
cv::Point2f(uniforms[i].topLeftCornerX, uniforms[i].topLeftCornerY), cv::Point2f(uniforms[i].topLeftCornerX, uniforms[i].topLeftCornerY),
...@@ -1095,21 +1072,40 @@ void CameraHandle::faceUniformOverlap(std::map<QString, vides_data::requestFaceR ...@@ -1095,21 +1072,40 @@ void CameraHandle::faceUniformOverlap(std::map<QString, vides_data::requestFaceR
cv::Point2f(uniforms[i].bottomRightCornerX, uniforms[i].bottomRightCornerY), cv::Point2f(uniforms[i].bottomRightCornerX, uniforms[i].bottomRightCornerY),
cv::Point2f(uniforms[i].bottomLeftCornerX, uniforms[i].bottomLeftCornerY) cv::Point2f(uniforms[i].bottomLeftCornerX, uniforms[i].bottomLeftCornerY)
}; };
if (!isClockwise(uniformAreaPoints)) { if (!isClockwise(uniformAreaPoints)) {
std::reverse(uniformAreaPoints.begin(), uniformAreaPoints.end()); std::reverse(uniformAreaPoints.begin(), uniformAreaPoints.end());
} }
float maxIntersectionArea = 0.0;
QString maxFaceId;
for (auto iter = mapFaces.begin(); iter != mapFaces.end(); ++iter) {
QString faceId = iter->first; // 人员id
vides_data::requestFaceReconition faceValue = iter->second;
std::vector<cv::Point2f> faceAreaPoints = {
cv::Point2f(faceValue.area.top_left_corner_x, faceValue.area.top_left_corner_y),
cv::Point2f(faceValue.area.top_right_corner_x, faceValue.area.top_right_corner_y),
cv::Point2f(faceValue.area.bottom_right_corner_x, faceValue.area.bottom_right_corner_y),
cv::Point2f(faceValue.area.bottom_left_corner_x, faceValue.area.bottom_left_corner_y)
};
if (!isClockwise(faceAreaPoints)) {
std::reverse(faceAreaPoints.begin(), faceAreaPoints.end());
}
std::vector<cv::Point2f> intersection; std::vector<cv::Point2f> intersection;
float intersectionArea = cv::intersectConvexConvex(faceAreaPoints, uniformAreaPoints, intersection, true); float intersectionArea = cv::intersectConvexConvex(uniformAreaPoints, faceAreaPoints, intersection, true);
if (intersectionArea > maxIntersectionArea) { if (intersectionArea > maxIntersectionArea) {
maxIntersectionArea = intersectionArea; maxIntersectionArea = intersectionArea;
maxUniformIndex = static_cast<int>(i); maxFaceId = faceId;
} }
} }
if (maxUniformIndex != -1 && maxIntersectionArea > epsilon) { if (!maxFaceId.isEmpty() && maxIntersectionArea > epsilon) {
outUniforms.push_back(id); outUniforms.push_back(maxFaceId);
} }
} }
} }
......
...@@ -11,14 +11,11 @@ using namespace cimg_library; ...@@ -11,14 +11,11 @@ using namespace cimg_library;
FaceReconitionHandle::FaceReconitionHandle() { FaceReconitionHandle::FaceReconitionHandle() {
static int ii=0;
printf("FaceReconitionHandle 创建调用次数%d次\n", ++ii);
} }
FaceReconitionHandle::~FaceReconitionHandle(){ FaceReconitionHandle::~FaceReconitionHandle(){
if (ctxHandle != nullptr) { if (ctxHandle != nullptr) {
static int ii=0;
printf("FaceReconitionHandle 销毁调用次数%d次\n", ++ii);
HF_ReleaseFaceContext(ctxHandle); HF_ReleaseFaceContext(ctxHandle);
ctxHandle = nullptr; ctxHandle = nullptr;
} }
...@@ -32,7 +29,7 @@ bool FaceReconitionHandle::getIsRunning() const{ ...@@ -32,7 +29,7 @@ bool FaceReconitionHandle::getIsRunning() const{
} }
void FaceReconitionHandle::setIsRunning(bool running){ void FaceReconitionHandle::setIsRunning(bool running){
this->isRunning.store(running, std::memory_order_acquire); this->isRunning.store(running, std::memory_order_release);
} }
bool FaceReconitionHandle::getImageChanged()const{ bool FaceReconitionHandle::getImageChanged()const{
...@@ -40,7 +37,7 @@ bool FaceReconitionHandle::getImageChanged()const{ ...@@ -40,7 +37,7 @@ bool FaceReconitionHandle::getImageChanged()const{
} }
void FaceReconitionHandle::setImageChanged(bool imageChanged){ void FaceReconitionHandle::setImageChanged(bool imageChanged){
this->isImageChanged.store(imageChanged, std::memory_order_acquire); this->isImageChanged.store(imageChanged, std::memory_order_release);
} }
cv::Mat FaceReconitionHandle::loadImage(const QString &path) { cv::Mat FaceReconitionHandle::loadImage(const QString &path) {
...@@ -57,7 +54,7 @@ cv::Mat FaceReconitionHandle::loadImage(const QString &path) { ...@@ -57,7 +54,7 @@ cv::Mat FaceReconitionHandle::loadImage(const QString &path) {
void FaceReconitionHandle::initSourceImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence){ void FaceReconitionHandle::initSourceImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence){
ScopeSemaphoreExit streamGuard([this]() { ScopeSemaphoreExit streamGuard([this]() {
isImageChanged.store(false, std::memory_order_acquire); isImageChanged.store(false, std::memory_order_release);
}); });
featureRemove(); featureRemove();
HResult ret; HResult ret;
...@@ -204,9 +201,9 @@ cv::Mat FaceReconitionHandle::loadImageFromByteStream(const QString& filePath) { ...@@ -204,9 +201,9 @@ cv::Mat FaceReconitionHandle::loadImageFromByteStream(const QString& filePath) {
void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_acquire); thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
ScopeSemaphoreExit streamGuard([this]() { ScopeSemaphoreExit streamGuard([this]() {
isRunning.store(false, std::memory_order_acquire); isRunning.store(false, std::memory_order_release);
}); });
HResult ret; HResult ret;
HF_ContextCustomParameter parameter = {0}; HF_ContextCustomParameter parameter = {0};
......
...@@ -84,11 +84,11 @@ bool HumanDetection::getIsRunning()const{ ...@@ -84,11 +84,11 @@ bool HumanDetection::getIsRunning()const{
} }
void HumanDetection::setIsRunning(bool running) { void HumanDetection::setIsRunning(bool running) {
this->isRunning.store(running, std::memory_order_acquire); this->isRunning.store(running, std::memory_order_release);
} }
//0 人形 1 车形 2 工服 //0 人形 1 车形 2 工服
int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int>&reMap, std::vector<vides_data::ParkingArea> &currentPlate) { int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int>&reMap, std::vector<vides_data::ParkingArea> &currentPlate) {
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_acquire); thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
TCV_CameraStream *stream = TCV_CreateCameraStream(); TCV_CameraStream *stream = TCV_CreateCameraStream();
...@@ -97,7 +97,7 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int ...@@ -97,7 +97,7 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
// 释放相机流 // 释放相机流
TCV_ReleaseCameraStream(stream); TCV_ReleaseCameraStream(stream);
isRunning.store(false, std::memory_order_acquire); isRunning.store(false, std::memory_order_release);
}); });
...@@ -125,7 +125,9 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int ...@@ -125,7 +125,9 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
int tenPlace = uniformColor / 10; // 十位 int tenPlace = uniformColor / 10; // 十位
int onePlace = uniformColor % 10; // 个位 int onePlace = uniformColor % 10; // 个位
if (std::abs(person.y2 - person.y1) >= heightReference) { if (std::abs(person.y2 - person.y1) >= heightReference) {
++count_all;
//工服
if(person.uniform != tenPlace && person.uniform != onePlace){
vides_data::ParkingArea area; vides_data::ParkingArea area;
area.topLeftCornerX=person.x1; area.topLeftCornerX=person.x1;
area.topLeftCornerY=person.y1; area.topLeftCornerY=person.y1;
...@@ -136,9 +138,6 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int ...@@ -136,9 +138,6 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
area.bottomRightCornerX=person.x2; area.bottomRightCornerX=person.x2;
area.bottomRightCornerY=person.y2; area.bottomRightCornerY=person.y2;
currentPlate.push_back(area); currentPlate.push_back(area);
++count_all;
//工服
if(person.uniform != tenPlace && person.uniform != onePlace){
++count_no_uniform; ++count_no_uniform;
} }
......
...@@ -121,13 +121,13 @@ bool LicensePlateRecognition::getIsRunning()const{ ...@@ -121,13 +121,13 @@ bool LicensePlateRecognition::getIsRunning()const{
} }
void LicensePlateRecognition::setIsRunning(bool running) { void LicensePlateRecognition::setIsRunning(bool running) {
this->isRunning.store(running, std::memory_order_acquire); this->isRunning.store(running, std::memory_order_release);
} }
void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate, void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime) { qint64 currentTime) {
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_acquire); thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
// 执行一帧图像数据检测行人 // 执行一帧图像数据检测行人
// create ImageData // create ImageData
...@@ -140,7 +140,7 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString ...@@ -140,7 +140,7 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
// create DataBuffer // create DataBuffer
P_HLPR_DataBuffer buffer = HLPR_CreateDataBuffer(&data); P_HLPR_DataBuffer buffer = HLPR_CreateDataBuffer(&data);
ScopeSemaphoreExit streamGuard([this, buffer]() { ScopeSemaphoreExit streamGuard([this, buffer]() {
isRunning.store(false, std::memory_order_acquire); isRunning.store(false, std::memory_order_release);
HLPR_ReleaseDataBuffer(buffer); HLPR_ReleaseDataBuffer(buffer);
}); });
......
...@@ -12,7 +12,7 @@ TEMPLATE = app ...@@ -12,7 +12,7 @@ TEMPLATE = app
# depend on your compiler). Please consult the documentation of the # depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it. # deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS DEFINES += QT_DEPRECATED_WARNINGS
DEFINES += APP_VERSION=\\\"1.0.3\\\" DEFINES += APP_VERSION=\\\"1.1.0\\\"
QMAKE_LIBDIR += /usr/local/lib QMAKE_LIBDIR += /usr/local/lib
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment