Commit fe54e885 by “liusq”

调度算法修改为通用模版

parent 54c45fe9
......@@ -34,8 +34,6 @@ void AlgorithmTaskManage::initFaceReconitionHandle(std::map<QString,QString>&map
}
void AlgorithmTaskManage::modifyImageFeature(std::map<QString,QString>&maps,int numberFaces,float confidence,bool isNull){
static int i=0;
printf("modifyImageFeature调用次数%d次\n", ++i);
std::lock_guard<std::mutex> lock(mtxFace);
for (FaceReconitionHandle* face : faceReconitionHandles) {
face->setImageChanged(true);
......@@ -63,128 +61,17 @@ AlgorithmTaskManage::~AlgorithmTaskManage(){
void* AlgorithmTaskManage::schedulingAlgorithm(int scheType) {
if (scheType == 0x01) {
std::lock_guard<std::mutex> lock(mtxHuman);
// 获取当前时间作为基准
qint64 currentTime = QDateTime::currentSecsSinceEpoch();
qint64 maxWaitTime = 0;
// 记录最大等待时间的对象数量
int maxWaitTimeCount = 0;
std::vector<HumanDetection*> schedulableObjects;
// 遍历humanDetections,找到所有等待时间相同的未执行的HumanDetection对象
for (HumanDetection* human : humanDetections) {
if (human->getIsRunning()) continue;
// 计算此对象自上次执行以来的等待时间
qint64 waitTime = std::abs(currentTime - human->getThreadTime());
if (waitTime > maxWaitTime) {
schedulableObjects.clear();
schedulableObjects.push_back(human);
maxWaitTime = waitTime;
maxWaitTimeCount = 1;
} else if (waitTime == maxWaitTime) {
schedulableObjects.push_back(human);
maxWaitTimeCount++;
}
}
// 如果最大等待时间的对象数量为1,直接返回
if (maxWaitTimeCount == 1) {
return schedulableObjects.at(0);
}
if (schedulableObjects.empty()) {
return nullptr; // 如果没有可调度对象,返回 nullptr 或进行适当处理
}
// 在可调度的对象中随机选择一个
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
return schedulableObjects[dis(gen)];
return schedulingAlgorithmTemplate(humanDetections, mtxHuman);
} else if (scheType == 0x02) {
std::lock_guard<std::mutex> lock(mtxLicense);
// 获取当前时间作为基准
qint64 currentTime = QDateTime::currentSecsSinceEpoch();
qint64 maxWaitTime = 0;
// 记录最大等待时间的对象数量
int maxWaitTimeCount = 0;
std::vector<LicensePlateRecognition*> schedulableObjects;
// 遍历licensePlateRecognitions,找到所有等待时间相同的未执行的LicensePlateRecognition对象
for (LicensePlateRecognition* licensePlateRecognition : licensePlateRecognitions) {
if (licensePlateRecognition->getIsRunning()) continue;
// 计算此对象自上次执行以来的等待时间
qint64 waitTime = std::abs(currentTime - licensePlateRecognition->getThreadTime());
if (waitTime > maxWaitTime) {
schedulableObjects.clear();
schedulableObjects.push_back(licensePlateRecognition);
maxWaitTime = waitTime;
maxWaitTimeCount = 1;
} else if (waitTime == maxWaitTime) {
schedulableObjects.push_back(licensePlateRecognition);
maxWaitTimeCount++;
}
}
// 如果最大等待时间的对象数量为1,直接返回
if (maxWaitTimeCount == 1) {
return schedulableObjects.at(0);
}
if (schedulableObjects.empty()) {
return nullptr; // 如果没有可调度对象,返回 nullptr 或进行适当处理
}
// 在可调度的对象中随机选择一个
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
return schedulableObjects[dis(gen)];
}else if (scheType==0x03) {
std::lock_guard<std::mutex> lock(mtxFace);
// 获取当前时间作为基准
qint64 currentTime = QDateTime::currentSecsSinceEpoch();
qint64 maxWaitTime = 0;
// 记录最大等待时间的对象数量
int maxWaitTimeCount = 0;
std::vector<FaceReconitionHandle*> schedulableObjects;
// 遍历faceReconitionHandles,找到所有等待时间相同的未执行的FaceReconitionHandle对象
for (FaceReconitionHandle* face : faceReconitionHandles) {
if (face->getIsRunning()) continue;
// 计算此对象自上次执行以来的等待时间
qint64 waitTime = std::abs(currentTime - face->getThreadTime());
if (waitTime > maxWaitTime) {
schedulableObjects.clear();
schedulableObjects.push_back(face);
maxWaitTime = waitTime;
maxWaitTimeCount = 1;
} else if (waitTime == maxWaitTime) {
schedulableObjects.push_back(face);
maxWaitTimeCount++;
}
}
// 如果最大等待时间的对象数量为1,直接返回
if (maxWaitTimeCount == 1) {
return schedulableObjects.at(0);
}
if (schedulableObjects.empty()) {
return nullptr; // 如果没有可调度对象,返回 nullptr 或进行适当处理
}
// 在可调度的对象中随机选择一个
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
return schedulableObjects[dis(gen)];
}
else {
return schedulingAlgorithmTemplate(licensePlateRecognitions, mtxLicense);
} else if (scheType == 0x03) {
return schedulingAlgorithmTemplate(faceReconitionHandles, mtxFace);
} else {
qInfo() << "参数错误";
return nullptr;
}
}
void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces,QString sSn){
faceSemaphore.acquire();
ScopeSemaphoreExit guard([this]() {
......@@ -197,7 +84,7 @@ void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,s
qInfo() << "人脸识别算法抢到===>sn"<<sSn<<selectedFaceReconition;
selectedFaceReconition->doesItExistEmployee(source, faces);
} else {
qDebug() << "没有可用的LicensePlateRecognition对象可以调度";
qInfo() << "没有可用的LicensePlateRecognition对象可以调度";
return ;
}
}
......@@ -216,7 +103,7 @@ void AlgorithmTaskManage::executeFindlicensePlateNumber(const cv::Mat &source, Q
qInfo() << "车牌调度算法抢到===>sn"<<sSn<<selectedLicensePlate;
selectedLicensePlate->licensePlateNumber(source, lpNumber,plate, currentTime);
} else {
qDebug() << "没有可用的LicensePlateRecognition对象可以调度";
qInfo() << "没有可用的LicensePlateRecognition对象可以调度";
return ;
}
}
......@@ -236,7 +123,7 @@ int AlgorithmTaskManage::executeFindHuManCar(const cv::Mat &source, int res,
int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, currentPlate);
return detectionResult;
} else {
qDebug() << "没有可用的HumanDetection对象可以调度";
qInfo() << "没有可用的HumanDetection对象可以调度";
return -2;
}
}
......@@ -31,6 +31,8 @@ public:
void initFaceReconitionHandle(std::map<QString,QString>&maps,int numberFaces,float confidence);
void *schedulingAlgorithm(int scheType);
int executeFindHuManCar(const cv::Mat &source,int res,std::vector<vides_data::ParkingArea> &currentPlate,
std::map<int,int>&resMap, QString sSn);
......@@ -42,6 +44,42 @@ public:
private:
template<typename T>
T* schedulingAlgorithmTemplate(std::vector<T*>& objects, std::mutex& mtx) {
std::lock_guard<std::mutex> lock(mtx);
qint64 currentTime = QDateTime::currentSecsSinceEpoch();
qint64 maxWaitTime = 0;
int maxWaitTimeCount = 0;
std::vector<T*> schedulableObjects;
for (T* obj : objects) {
if (obj->getIsRunning()) continue;
qint64 waitTime = std::abs(currentTime - obj->getThreadTime());
if (waitTime > maxWaitTime) {
schedulableObjects.clear();
schedulableObjects.push_back(obj);
maxWaitTime = waitTime;
maxWaitTimeCount = 1;
} else if (waitTime == maxWaitTime) {
schedulableObjects.push_back(obj);
maxWaitTimeCount++;
}
}
if (maxWaitTimeCount == 1) {
return schedulableObjects.at(0);
}
if (schedulableObjects.empty()) {
return nullptr;
}
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
return schedulableObjects[dis(gen)];
}
static AlgorithmTaskManage* instance;
std::vector<HumanDetection*>humanDetections;
......
......@@ -11,14 +11,11 @@ using namespace cimg_library;
// Default constructor. The unsynchronized `static int` call counter and the
// printf debug instrumentation that used to live here were removed: a static
// mutable counter incremented from multiple threads is a data race, and the
// constructor needs no other setup (members use their in-class initializers
// — TODO confirm against the class declaration).
FaceReconitionHandle::FaceReconitionHandle() {
}
FaceReconitionHandle::~FaceReconitionHandle(){
if (ctxHandle != nullptr) {
static int ii=0;
printf("FaceReconitionHandle 销毁调用次数%d次\n", ++ii);
HF_ReleaseFaceContext(ctxHandle);
ctxHandle = nullptr;
}
......@@ -32,7 +29,7 @@ bool FaceReconitionHandle::getIsRunning() const{
}
// Marks the handler busy/idle.
// Uses memory_order_release so that all writes made before flagging are
// visible to threads that acquire-load isRunning; storing with
// memory_order_acquire (as the old line did) is undefined behavior — acquire
// is not a valid ordering for a store.
void FaceReconitionHandle::setIsRunning(bool running){
    this->isRunning.store(running, std::memory_order_release);
}
bool FaceReconitionHandle::getImageChanged()const{
......@@ -40,7 +37,7 @@ bool FaceReconitionHandle::getImageChanged()const{
}
// Flags that the reference image set has changed (readers re-load features).
// memory_order_release publishes prior writes to acquire-loaders of
// isImageChanged; a store with memory_order_acquire (the old line) is
// undefined behavior, since acquire is not a valid store ordering.
void FaceReconitionHandle::setImageChanged(bool imageChanged){
    this->isImageChanged.store(imageChanged, std::memory_order_release);
}
cv::Mat FaceReconitionHandle::loadImage(const QString &path) {
......@@ -57,7 +54,7 @@ cv::Mat FaceReconitionHandle::loadImage(const QString &path) {
void FaceReconitionHandle::initSourceImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence){
ScopeSemaphoreExit streamGuard([this]() {
isImageChanged.store(false, std::memory_order_acquire);
isImageChanged.store(false, std::memory_order_release);
});
featureRemove();
HResult ret;
......@@ -204,9 +201,9 @@ cv::Mat FaceReconitionHandle::loadImageFromByteStream(const QString& filePath) {
void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_acquire);
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
ScopeSemaphoreExit streamGuard([this]() {
isRunning.store(false, std::memory_order_acquire);
isRunning.store(false, std::memory_order_release);
});
HResult ret;
HF_ContextCustomParameter parameter = {0};
......
......@@ -84,11 +84,11 @@ bool HumanDetection::getIsRunning()const{
}
// Marks the detector busy/idle.
// memory_order_release makes prior writes visible to acquire-loaders of
// isRunning; the removed duplicate store used memory_order_acquire, which is
// undefined behavior for a store operation.
void HumanDetection::setIsRunning(bool running) {
    this->isRunning.store(running, std::memory_order_release);
}
//0 人形 1 车形 2 工服
int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int>&reMap, std::vector<vides_data::ParkingArea> &currentPlate) {
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_acquire);
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
TCV_CameraStream *stream = TCV_CreateCameraStream();
......@@ -97,7 +97,7 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
// 释放相机流
TCV_ReleaseCameraStream(stream);
isRunning.store(false, std::memory_order_acquire);
isRunning.store(false, std::memory_order_release);
});
......@@ -125,20 +125,19 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
int tenPlace = uniformColor / 10; // 十位
int onePlace = uniformColor % 10; // 个位
if (std::abs(person.y2 - person.y1) >= heightReference) {
vides_data::ParkingArea area;
area.topLeftCornerX=person.x1;
area.topLeftCornerY=person.y1;
area.bottomLeftCornerX=person.x1;
area.bottomLeftCornerY=person.y2;
area.topRightCornerX=person.x2;
area.topRightCornerY=person.y1;
area.bottomRightCornerX=person.x2;
area.bottomRightCornerY=person.y2;
currentPlate.push_back(area);
++count_all;
//工服
if(person.uniform != tenPlace && person.uniform != onePlace){
vides_data::ParkingArea area;
area.topLeftCornerX=person.x1;
area.topLeftCornerY=person.y1;
area.bottomLeftCornerX=person.x1;
area.bottomLeftCornerY=person.y2;
area.topRightCornerX=person.x2;
area.topRightCornerY=person.y1;
area.bottomRightCornerX=person.x2;
area.bottomRightCornerY=person.y2;
currentPlate.push_back(area);
++count_no_uniform;
}
......
......@@ -121,13 +121,13 @@ bool LicensePlateRecognition::getIsRunning()const{
}
// Marks the recognizer busy/idle.
// memory_order_release makes prior writes visible to acquire-loaders of
// isRunning; the removed duplicate store used memory_order_acquire, which is
// undefined behavior for a store operation.
void LicensePlateRecognition::setIsRunning(bool running) {
    this->isRunning.store(running, std::memory_order_release);
}
void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime) {
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_acquire);
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
// 执行一帧图像数据检测行人
// create ImageData
......@@ -140,7 +140,7 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
// create DataBuffer
P_HLPR_DataBuffer buffer = HLPR_CreateDataBuffer(&data);
ScopeSemaphoreExit streamGuard([this, buffer]() {
isRunning.store(false, std::memory_order_acquire);
isRunning.store(false, std::memory_order_release);
HLPR_ReleaseDataBuffer(buffer);
});
......
......@@ -12,7 +12,7 @@ TEMPLATE = app
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
DEFINES += APP_VERSION=\\\"1.1.0\\\"
QMAKE_LIBDIR += /usr/local/lib
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment