Commit c2bb647b by “liusq”

License plate and human-shape detection: add thread reuse (worker pooling)

parent 6b53f770
#include "HumanDetectionManage.h"
#include "AlgorithmTaskManage.h"
AlgorithmTaskManage::AlgorithmTaskManage(int humanDetectionLen):semaphore(humanDetectionLen),
plateSemaphore(humanDetectionLen){
this->humanDetectionLen=humanDetectionLen;
}
void AlgorithmTaskManage::initHumanDetectionManage(const QString &modelPaths,
float carShapeConfidence,float &height_reference,int &uniformColor) {
for (int i = 0; i < humanDetectionLen; ++i) {
HumanDetection* human=new HumanDetection(modelPaths,carShapeConfidence);
human->setHuManParameter(height_reference,uniformColor);
humanDetections.emplace_back(human);
}
}
void AlgorithmTaskManage::initLicensePlateManage(const QString &modelPaths,
float carConfidence){
for (int i = 0; i < humanDetectionLen; ++i) {
LicensePlateRecognition* licensePlateRecognition=new LicensePlateRecognition(modelPaths,carConfidence);
licensePlateRecognitions.emplace_back(licensePlateRecognition);
}
}
AlgorithmTaskManage::~AlgorithmTaskManage(){
Common & instace= Common::getInstance();
for (auto task:humanDetections) {
instace.deleteObj(task);
}
for (auto plate:licensePlateRecognitions) {
instace.deleteObj(plate);
}
}
void* AlgorithmTaskManage::schedulingAlgorithm(int scheType) {
if (scheType == 0x01) {
std::lock_guard<std::mutex> lock(mtxHuman);
// Use the current time as the reference point (milliseconds, to match getThreadTime())
qint64 currentTime = QDateTime::currentMSecsSinceEpoch();
qint64 maxWaitTime = 0;
// Number of objects sharing the maximum wait time
int maxWaitTimeCount = 0;
// All currently schedulable (idle) objects
std::vector<HumanDetection*> schedulableObjects;
// Walk humanDetections and collect every idle HumanDetection with the same maximum wait time
for (HumanDetection* human : humanDetections) {
if (human->getIsRunning()) continue;
// Wait time since this object last ran
qint64 waitTime = std::abs(currentTime - human->getThreadTime());
if (waitTime > maxWaitTime) {
schedulableObjects.clear();
schedulableObjects.push_back(human);
maxWaitTime = waitTime;
maxWaitTimeCount = 1;
} else if (waitTime == maxWaitTime) {
schedulableObjects.push_back(human);
maxWaitTimeCount++;
}
}
// If exactly one object holds the maximum wait time, return it directly
if (maxWaitTimeCount == 1) {
return schedulableObjects.at(0);
}
if (schedulableObjects.empty()) {
return nullptr; // No schedulable object; let the caller handle it
}
// Otherwise pick one of the schedulable objects at random
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
return schedulableObjects[dis(gen)];
} else if (scheType == 0x02) {
std::lock_guard<std::mutex> lock(mtxLicense);
// Use the current time as the reference point (milliseconds, to match getThreadTime())
qint64 currentTime = QDateTime::currentMSecsSinceEpoch();
qint64 maxWaitTime = 0;
// Number of objects sharing the maximum wait time
int maxWaitTimeCount = 0;
// All currently schedulable (idle) objects
std::vector<LicensePlateRecognition*> schedulableObjects;
// Walk licensePlateRecognitions and collect every idle LicensePlateRecognition with the same maximum wait time
for (LicensePlateRecognition* licensePlateRecognition : licensePlateRecognitions) {
if (licensePlateRecognition->getIsRunning()) continue;
// Wait time since this object last ran
qint64 waitTime = std::abs(currentTime - licensePlateRecognition->getThreadTime());
if (waitTime > maxWaitTime) {
schedulableObjects.clear();
schedulableObjects.push_back(licensePlateRecognition);
maxWaitTime = waitTime;
maxWaitTimeCount = 1;
} else if (waitTime == maxWaitTime) {
schedulableObjects.push_back(licensePlateRecognition);
maxWaitTimeCount++;
}
}
// If exactly one object holds the maximum wait time, return it directly
if (maxWaitTimeCount == 1) {
return schedulableObjects.at(0);
}
if (schedulableObjects.empty()) {
return nullptr; // No schedulable object; let the caller handle it
}
// Otherwise pick one of the schedulable objects at random
std::random_device rd;
std::mt19937 gen(rd());
std::uniform_int_distribution<> dis(0, schedulableObjects.size() - 1);
return schedulableObjects[dis(gen)];
} else {
qInfo() << "参数错误";
return nullptr;
}
}
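The two branches above differ only in the worker type; the selection policy itself (prefer the idle worker that has waited longest since it last ran, break ties at random) could be factored into a single template. A minimal standalone sketch of that policy, assuming only the getIsRunning()/getThreadTime() interface shown above; the name pickLongestIdle is illustrative and not part of this commit:

#include <QDateTime>
#include <cstdlib>
#include <random>
#include <vector>

// Returns the idle worker whose last run is furthest in the past; ties are broken randomly.
// Worker is assumed to expose getIsRunning() and getThreadTime() like the classes above.
template <typename Worker>
Worker* pickLongestIdle(const std::vector<Worker*> &workers) {
    qint64 now = QDateTime::currentMSecsSinceEpoch();
    qint64 maxWait = -1;
    std::vector<Worker*> candidates;
    for (Worker* w : workers) {
        if (w->getIsRunning()) continue;
        qint64 wait = std::abs(now - w->getThreadTime());
        if (wait > maxWait) {
            candidates.assign(1, w);   // new maximum: restart the candidate list
            maxWait = wait;
        } else if (wait == maxWait) {
            candidates.push_back(w);   // same maximum: keep for the random tie-break
        }
    }
    if (candidates.empty()) return nullptr;
    static thread_local std::mt19937 gen{std::random_device{}()};
    std::uniform_int_distribution<std::size_t> dis(0, candidates.size() - 1);
    return candidates[dis(gen)];
}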
void AlgorithmTaskManage::executeFindlicensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime,QString sSn){
plateSemaphore.acquire();
ScopeSemaphoreExit guard([this]() {
plateSemaphore.release(); // Release the semaphore
});
auto* selectedLicensePlate = static_cast<LicensePlateRecognition*>(schedulingAlgorithm(0x02));
if (selectedLicensePlate!=nullptr) {
selectedLicensePlate->setIsRunning(true);
// Run plate recognition on the selected object
qInfo() << "车牌调度算法抢到===>sn"<<sSn<<selectedLicensePlate;
selectedLicensePlate->licensePlateNumber(source, lpNumber,plate, currentTime);
} else {
qDebug() << "没有可用的LicensePlateRecognition对象可以调度";
return ;
}
}
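Both execute* entry points follow the same pattern: acquire a counting semaphore that caps concurrency at the pool size, then release it through a scope guard so that every return path gives the permit back. A minimal sketch of such a guard, assuming only that the stored callback must run exactly once on scope exit (the ScopeSemaphoreExit class in this repository may differ in detail):

#include <functional>
#include <utility>

// Minimal scope guard: runs the stored callback when the guard is destroyed.
class ScopeExit {
public:
    explicit ScopeExit(std::function<void()> fn) : fn_(std::move(fn)) {}
    ~ScopeExit() { if (fn_) fn_(); }
    ScopeExit(const ScopeExit&) = delete;
    ScopeExit& operator=(const ScopeExit&) = delete;
private:
    std::function<void()> fn_;
};

// Usage mirroring the functions above:
//   semaphore.acquire();
//   ScopeExit guard([&] { semaphore.release(); });
//   ...early returns now release the permit exactly once...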
int AlgorithmTaskManage::executeFindHuManCar(const cv::Mat &source, int res,
std::vector<vides_data::ParkingArea> &currentPlate,std::map<int,int>&resMap,QString sSn) {
semaphore.acquire();
ScopeSemaphoreExit guard([this]() {
semaphore.release(); // Release the semaphore
});
auto* selectedHumanDetection = static_cast<HumanDetection*>(schedulingAlgorithm(0x01));
if (selectedHumanDetection!=nullptr) {
selectedHumanDetection->setIsRunning(true);
// Call findHuManCar on the selected object
qInfo() << "人形调度算法抢到===>sn"<<sSn<<"res"<<selectedHumanDetection;
int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, currentPlate);
return detectionResult;
} else {
......
......@@ -4,42 +4,55 @@
#include "Common.h"
#include "VidesData.h"
#include "ScopeSemaphoreExit.h"
#include <QWaitCondition>
#include <QMutex>
#include "LicensePlateRecognition.h"
#include <QThread>
#include <random>
#include <QSemaphore>
#include <vector>
#include <opencv2/opencv.hpp>
#include <mutex>
class AlgorithmTaskManage{
public:
AlgorithmTaskManage(int humanDetectionLen);
~AlgorithmTaskManage();
static AlgorithmTaskManage& getInstance(int humanDetectionLen)
{
static AlgorithmTaskManage instance(humanDetectionLen);
return instance;
}
void initHumanDetectionManage(const QString &modelPaths,
float carShapeConfidence,float &height_reference,int &uniformColor);
void initLicensePlateManage(const QString &modelPaths,
float carConfidence);
int executeFindHuManCar(const cv::Mat &source,int res,std::vector<vides_data::ParkingArea> &currentPlate,
std::map<int,int>&resMap, QString sSn);
void *schedulingAlgorithm(int scheType);
void executeFindlicensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime,QString sSn);
private:
static AlgorithmTaskManage* instance;
std::vector<HumanDetection*>humanDetections;
std::vector<LicensePlateRecognition*>licensePlateRecognitions;
int humanDetectionLen;
QSemaphore semaphore;
QSemaphore plateSemaphore;
std::mutex mtxHuman;
QWaitCondition waitCondition;
std::mutex mtxLicense;
QMutex mutex;
};
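Note that getInstance is a Meyers singleton that takes a constructor argument: only the first call's humanDetectionLen is used to size the pools, and the argument of any later call is ignored. A short usage sketch, assuming this commit's header; the path and thresholds are illustrative values only:

#include "AlgorithmTaskManage.h"

void setupAlgorithms() {
    QString modelPaths = "/opt/models";      // illustrative path
    float heightReference = 100.0f;          // illustrative thresholds
    int uniformColor = 13;
    AlgorithmTaskManage &mgr = AlgorithmTaskManage::getInstance(4);  // pool size fixed by the first call
    mgr.initHumanDetectionManage(modelPaths, 0.5f, heightReference, uniformColor);
    mgr.initLicensePlateManage(modelPaths, 0.75f);
    AlgorithmTaskManage::getInstance(8);     // argument ignored; still the 4-worker instance
}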
......
#include "CameraHandle.h"
#include "TaskRunnable.h"
#include "HumanDetectionManage.h"
#include "AlgorithmTaskManage.h"
#include "ScopeSemaphoreExit.h"
#include <QElapsedTimer>
#include <QRegularExpression>
......@@ -447,11 +447,11 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
if(newInfo.getLicensePlate().length()<=0){
Common & instace= Common::getInstance();
int humanLen=instace.getHumanDetectionLen();
HumanDetectionManage &humanDetectionManage=HumanDetectionManage::getInstance(humanLen);
AlgorithmTaskManage &algorithmTaskManage=AlgorithmTaskManage::getInstance(humanLen);
std::vector<vides_data::ParkingArea> currentPlates;
std::map<int,int>resMap;
int car_size =algorithmTaskManage.executeFindHuManCar(frame,0x01,currentPlates,resMap,sSn);
qDebug()<<sSn<<":"<<"当前车形数量:"<<car_size;
if (car_size <= 0 && car_size!=-2) {
......@@ -547,8 +547,9 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
int height = frame.rows; // 获取图像高度
int humanlen=instace.getHumanDetectionLen();
qDebug()<<"frame 宽度:"<<width<<"frame 高度:"<<height;
HumanDetectionManage &humanDetectionManage=HumanDetectionManage::getInstance(humanlen);
LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance();
AlgorithmTaskManage &algorithmTaskManage=AlgorithmTaskManage::getInstance(humanlen);
std::map<QString,vides_data::requestFaceReconition> mapFaces;
......@@ -565,22 +566,24 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
// Work-uniform detection
if ((algorithmPermissions & 0x01<<2) != 0) {
uniforms=algorithmTaskManage.executeFindHuManCar(frame,0x02,currentPlates,resMap,sSn);
if(currentPlates.size()>0x00){
faSize=resMap.at(0x00);
uniforms=resMap.at(0x02);
}
}else{
// Human-shape detection
if((algorithmPermissions & 0x01<<1) != 0){
uniforms=algorithmTaskManage.executeFindHuManCar(frame,0x00,currentPlates,resMap,sSn);
if(currentPlates.size()>0x00){
faSize=resMap.at(0x00);
uniforms=resMap.at(0x02);
uniforms=faSize;
}
}
}
qint64 elapsedTime = timer.elapsed();
qInfo()<<"人脸数量==>"<<faSize;
qInfo()<<"未穿工服数量==>"<<uniforms;
qInfo() << "humanDetectionManage.executeFindHuManCa:执行时间"<<elapsedTime / 1000;
......@@ -595,6 +598,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
if ((algorithmPermissions & 0x01<<2) != 0) {
worker = (faSize - uniforms > 0) ? (faSize - uniforms) : 0;
}
qInfo()<<"工作人数==>"<<worker;
vides_data::response* resp=httpService.httpPostFacePopulation(imgs,faSize,worker,sSn,currentTime);
if (resp->code!= 0) {
qInfo()<<"人数变化推送信息推送失败";
......@@ -611,6 +615,8 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
qDebug() << "faceRecognition.doesItExistEmployee Current thread ID: " << QThread::currentThreadId()<<sSn;
std::list<vides_data::faceRecognitionResult>faces;
faceReconitionHandle->doesItExistEmployee(frame,faces);
qInfo()<<"识别的人脸数量==>"<<faces.size();
if (faces.size()>0) {
for(auto face:faces){
vides_data::requestFaceReconition faceReconition;
......@@ -706,7 +712,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
if(countValue==0 ){
vides_data::requestLicensePlate initPlate;
initPlate.sn=sSn;
algorithmTaskManage.executeFindlicensePlateNumber(frame, lpNumber,initPlate,currentTime,sSn);
if(initPlate.plates.size()==0){
batchRegionalPushLicensePlate(imgs,currentTime,initPlate);
if(initPlate.plates.size()>0){
......@@ -730,9 +736,9 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
}
vides_data::requestLicensePlate resultPlate;
resultPlate.sn=sSn;
algorithmTaskManage.executeFindlicensePlateNumber(areaMat, lpNumber,resultPlate,currentTime,sSn);
std::list<vides_data::LicensePlate>ps =resultPlate.plates;
qDebug()<<QString("sn==>%1,区域:%2识别的车牌信息是:%3").arg(sSn).arg(key).
arg(lpNumber);
......@@ -764,7 +770,9 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
continue;
}
vides_data::LicensePlate maxPlate;
LicensePlateRecognition licensePlateRecognitionNew;
licensePlateRecognitionNew.filterLicensePlateConfidenceMax(resultPlate,maxPlate);
RecognizedInfo recognizedInfo;
if (maxPlate.new_color=="蓝牌" && maxPlate.new_plate.length() != 7) {
......
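For reference, updateImage above gates each algorithm with bit tests on algorithmPermissions: bit 2 (0x01<<2) enables the work-uniform check and bit 1 (0x01<<1) enables plain human-shape detection. A tiny self-contained sketch of that convention; the enum and its names are hypothetical and not used in this repository:

#include <cstdio>

// Hypothetical names for the permission bits tested in updateImage.
enum AlgorithmPermission : unsigned {
    kHumanShape = 1u << 1,   // (algorithmPermissions & 0x01<<1) != 0
    kUniform    = 1u << 2    // (algorithmPermissions & 0x01<<2) != 0
};

int main() {
    unsigned algorithmPermissions = kHumanShape | kUniform;
    if ((algorithmPermissions & kUniform) != 0)
        std::printf("uniform detection enabled\n");
    if ((algorithmPermissions & kHumanShape) != 0)
        std::printf("human-shape detection enabled\n");
    return 0;
}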
......@@ -76,16 +76,19 @@ void HumanDetection::setHuManParameter(float &height_reference,int &uniformColor
}
qint64 HumanDetection::getThreadTime()const{
return thread_time.load(std::memory_order_acquire);
}
bool HumanDetection::getIsRunning()const{
return isRunning.load(std::memory_order_acquire);
}
void HumanDetection::setIsRunning(bool running) {
this->isRunning.store(running, std::memory_order_release); // release store pairs with the acquire loads above
}
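The loads above were tightened from memory_order_relaxed to memory_order_acquire so that the scheduler thread reading isRunning and thread_time sees the worker's most recent writes; the matching stores should use memory_order_release, since a release store pairs with an acquire load. A small self-contained sketch of that pairing, not taken from this commit:

#include <atomic>
#include <cstdint>

std::atomic<bool> isRunning{false};
std::atomic<int64_t> threadTime{0};

// Worker thread: publish the timestamp and the flag with release stores.
void markStarted(int64_t nowMs) {
    threadTime.store(nowMs, std::memory_order_release);
    isRunning.store(true, std::memory_order_release);
}

// Scheduler thread: acquire loads pair with the release stores above, so once
// the flag value is observed the timestamp written before it is visible as well.
bool snapshot(int64_t &outTimeMs) {
    bool running = isRunning.load(std::memory_order_acquire);
    outTimeMs = threadTime.load(std::memory_order_acquire);
    return running;
}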
// res: 0 human shape, 1 car shape, 2 work uniform
int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int>&reMap, std::vector<vides_data::ParkingArea> &currentPlate) {
isRunning.store(true, std::memory_order_relaxed);
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
TCV_CameraStream *stream = TCV_CreateCameraStream();
......@@ -94,7 +97,7 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
// Release the camera stream
TCV_ReleaseCameraStream(stream);
isRunning.store(false, std::memory_order_release);
});
......@@ -109,6 +112,8 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
if (res == 0x00 || res == 0x02) {
num = TCV_HumanDetectorGetNumOfHuman(detector);
qInfo()<<"TCV_HumanDetectorGetNumOfHuman==>"<<num;
if (num == 0) return num; // No pedestrians detected, return early
std::vector<TCV_ObjectLocation> results(num);
TCV_HumanDetectorGetHumanLocation(detector, results.data(), num);
......@@ -120,6 +125,7 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
int tenPlace = uniformColor / 10; // tens digit
int onePlace = uniformColor % 10; // ones digit
if (std::abs(person.y2 - person.y1) >= heightReference) {
vides_data::ParkingArea area;
area.topLeftCornerX=person.x1;
area.topLeftCornerY=person.y1;
......@@ -140,8 +146,8 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
}
reMap[0x02] = count_no_uniform; // number of pedestrians not wearing a work uniform
reMap[0x00] = count_all; // number of all pedestrians meeting the criteria
num = (res == 0x00) ? count_all : count_no_uniform; // update num to the actual count
}
else if (res == 0x01) {
num = TCV_HumanDetectorGetNumOfCar(detector);
......
......@@ -27,7 +27,8 @@ public:
bool getIsRunning() const;
void onTimeout();
void setIsRunning(bool running);
......
#include "Common.h"
#include "LicensePlateRecognition.h"
#include "ScopeSemaphoreExit.h"
LicensePlateRecognition::LicensePlateRecognition() {}
LicensePlateRecognition::LicensePlateRecognition(const QString &modelPaths, float carConfidence) {
HLPR_ContextConfiguration configuration = {0};
QByteArray && by_mpath=modelPaths.toUtf8();
char* m_path=by_mpath.data();
configuration.models_path = m_path;
configuration.max_num = 5;
configuration.det_level = DETECT_LEVEL_LOW;
configuration.use_half = false;
configuration.nms_threshold = 0.5f;
configuration.rec_confidence_threshold = carConfidence;
configuration.box_conf_threshold = 0.30f;
configuration.threads = 1;
ctx = HLPR_CreateContext(&configuration);
}
LicensePlateRecognition::~LicensePlateRecognition(){
HLPR_ReleaseContext(ctx);
}
void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const QString &modelPaths,QString & lpNumber){
HLPR_ImageData data = {0};
data.data = source.data;
......@@ -96,27 +112,22 @@ void LicensePlateRecognition::filterLicensePlateConfidenceMax(vides_data::reques
}
}
}
qint64 LicensePlateRecognition::getThreadTime()const{
return thread_time.load(std::memory_order_acquire);
}
bool LicensePlateRecognition::getIsRunning()const{
return isRunning.load(std::memory_order_acquire);
}
void LicensePlateRecognition::setIsRunning(bool running) {
this->isRunning.store(running, std::memory_order_release);
}
void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime) {
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
// Run plate detection on one frame of image data
// create ImageData
......@@ -128,7 +139,11 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
data.rotation = CAMERA_ROTATION_0;
// create DataBuffer
P_HLPR_DataBuffer buffer = HLPR_CreateDataBuffer(&data);
ScopeSemaphoreExit streamGuard([this, buffer]() {
isRunning.store(false, std::memory_order_release);
HLPR_ReleaseDataBuffer(buffer);
});
HREESULT ret = HLPR_ContextQueryStatus(ctx);
if (ret != HResultCode::Ok) {
qInfo()<<QString("create error");
......@@ -139,7 +154,6 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
HLPR_ContextUpdateStream(ctx, buffer, &results);
if (results.plate_size <= 0) {
return;
}
......@@ -180,8 +194,7 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
}
lpNumber =lpResults.join("\t");
}
......@@ -8,19 +8,15 @@
#include <QTextStream>
#include <QFile>
#include <QImage>
#include <mutex>
#include <QRegularExpression>
#include <atomic>
const std::vector<std::string> types =
{"蓝牌", "黄牌单层", "白牌单层", "绿牌新能源", "黑牌港澳",
"香港单层", "香港双层", "澳门单层", "澳门双层", "黄牌双层"};
class LicensePlateRecognition{
public:
static LicensePlateRecognition& getInstance()
{
static LicensePlateRecognition instance;
return instance;
}
// Recognize the license plate number
void licensePlateNumber(const cv::Mat &source,QString & lpNumber, vides_data::requestLicensePlate &plate,
qint64 currentTime);
......@@ -28,19 +24,30 @@ public:
void filterLicensePlateConfidenceMax(vides_data::requestLicensePlate &plate,vides_data::LicensePlate &max);
void oldLicensePlateNumber(const cv::Mat &source,const QString &modelPaths,QString & lpNumber);
void initHlprContext(const QString &modelPaths,float carConfidence);
void replaceWith1And0( QString &code);
LicensePlateRecognition(const QString &modelPaths,float carConfidence);
LicensePlateRecognition();
~LicensePlateRecognition();
qint64 getThreadTime() const;
bool getIsRunning() const;
void setIsRunning(bool running);
private:
P_HLPR_Context ctx=nullptr ;
std::atomic<qint64> thread_time;
std::atomic<bool> isRunning{false};
};
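With the getInstance singleton removed, LicensePlateRecognition objects are now created directly by AlgorithmTaskManage::initLicensePlateManage, one per pool slot, and driven through the new setIsRunning/getThreadTime accessors. A short construction sketch, assuming this commit's header; the model path and threshold are illustrative:

#include "LicensePlateRecognition.h"

void buildRecognizer() {
    QString modelPaths = "/opt/models";   // illustrative path
    float carConfidence = 0.75f;          // illustrative recognition threshold
    LicensePlateRecognition *rec = new LicensePlateRecognition(modelPaths, carConfidence);
    // AlgorithmTaskManage keeps such pointers in licensePlateRecognitions and
    // calls setIsRunning()/licensePlateNumber()/getThreadTime() on them.
    delete rec;   // the manager itself frees pool members via Common::deleteObj
}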
......
......@@ -97,7 +97,7 @@ SOURCES += \
HumanDetection.cpp \
ScopeSemaphoreExit.cpp \
FaceReconitionHandle.cpp \
AlgorithmTaskManage.cpp
HEADERS += \
Common.h \
......@@ -115,7 +115,7 @@ HEADERS += \
HumanDetection.h \
ScopeSemaphoreExit.h \
FaceReconitionHandle.h \
AlgorithmTaskManage.h
#FORMS += \
# mainwindow.ui
......
......@@ -52,11 +52,9 @@ MainWindow::MainWindow()
float carConfidence=qSetting->value("devices/carConfidence").toFloat();
LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance();
licensePlateRecogn.initHlprContext(modelPaths,carConfidence);
HumanDetectionManage &humanDetectionManage= HumanDetectionManage::getInstance(humanDetectionLen);
humanDetectionManage.initHumanDetectionManage(modelPaths,carShapeConfidence,heightReference,uniformColor);
AlgorithmTaskManage &algorithmTaskManage= AlgorithmTaskManage::getInstance(humanDetectionLen);
algorithmTaskManage.initHumanDetectionManage(modelPaths,carShapeConfidence,heightReference,uniformColor);
algorithmTaskManage.initLicensePlateManage(modelPaths,carConfidence);
QString httpurl;
QString profile=qSetting->value("cloudservice/profile","test").toString();
if(strcmp(profile.toUtf8().data(),vides_data::PROFLIE_TEST)==0 ){
......
......@@ -2,12 +2,11 @@
#define MAINWINDOW_H
#include "Common.h"
#include "LicensePlateRecognition.h"
#include "CameraHandle.h"
#include "HttpService.h"
#include "VidesData.h"
#include "MediaFaceImage.h"
#include "HumanDetectionManage.h"
#include "AlgorithmTaskManage.h"
#include <algorithm>
#include <QString>
#include <QTextCodec>
......