Commit 5fe7062a by “liusq”

增加算法过滤和mark区域

parent 476b9ed9
...@@ -6,7 +6,8 @@ ...@@ -6,7 +6,8 @@
CameraHandle::CameraHandle(){ CameraHandle::CameraHandle(){
} }
CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &channel,const QString &modelPaths, float carConfidence,int imageSave) CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &channel,const QString &modelPaths, float carConfidence,
float carShapeConfidence,int imageSave)
: hDevice(-1), : hDevice(-1),
url(url), url(url),
loginParam(new SXSDKLoginParam()), loginParam(new SXSDKLoginParam()),
...@@ -16,12 +17,15 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch ...@@ -16,12 +17,15 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch
httpUrl(httpUrl), httpUrl(httpUrl),
dev_snap_syn_timer(new QTimer()), dev_snap_syn_timer(new QTimer()),
image_save(imageSave), image_save(imageSave),
faceCount(0),
semaphore(1) { semaphore(1) {
connect(this, SIGNAL(afterDownloadFile(int,int,QString)), this, SLOT(pushRecordToCloud(int,int,QString)),Qt::QueuedConnection); connect(this, SIGNAL(afterDownloadFile(int,int,QString)), this, SLOT(pushRecordToCloud(int,int,QString)),Qt::QueuedConnection);
detector = TCV_CreateHumanDetector(1); detector = TCV_CreateHumanDetector(1);
faceMapWorker.setX(0);
faceMapWorker.setY(0);
TCV_HumanDetectorSetHumanThreshold(detector,0.5f); TCV_HumanDetectorSetHumanThreshold(detector,0.5f);
TCV_HumanDetectorSetCarThreshold(detector,0.2f); TCV_HumanDetectorSetCarThreshold(detector,carShapeConfidence);
HLPR_ContextConfiguration configuration = {0}; HLPR_ContextConfiguration configuration = {0};
QByteArray && by_mpath=modelPaths.toUtf8(); QByteArray && by_mpath=modelPaths.toUtf8();
...@@ -86,6 +90,11 @@ int CameraHandle::sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName ...@@ -86,6 +90,11 @@ int CameraHandle::sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName
this->hDevice=loginResult; this->hDevice=loginResult;
return loginResult; return loginResult;
} }
void CameraHandle::initAlgorithmParameter(float &height_reference){
HumanDetection &humanDetection=HumanDetection::getInstance();
humanDetection.setHeightReference(height_reference);
}
int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int nParam2, int nParam3, const char* szString, void* pData, int64 pDataInfo, int nSeq, void* pUserData, void* pMsg){ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int nParam2, int nParam3, const char* szString, void* pData, int64 pDataInfo, int nSeq, void* pUserData, void* pMsg){
CameraHandle* cameraHandle=static_cast<CameraHandle*>(pUserData); CameraHandle* cameraHandle=static_cast<CameraHandle*>(pUserData);
std::map<QString, QString> &data=cameraHandle->getCurrentData(); std::map<QString, QString> &data=cameraHandle->getCurrentData();
...@@ -250,16 +259,15 @@ void CameraHandle::initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 fa ...@@ -250,16 +259,15 @@ void CameraHandle::initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 fa
} }
void CameraHandle::sdkRealTimeDevSnapSyn(int hDevice) { void CameraHandle::sdkRealTimeDevSnapSyn(int hDevice) {
QThreadPool* threadPool = QThreadPool::globalInstance(); QThreadPool* threadPool = QThreadPool::globalInstance();
threadPool->setMaxThreadCount(8); threadPool->setMaxThreadCount(12);
auto taskSyn = std::bind(&CameraHandle::sdkDevSnapSyn, this, hDevice, this->channel); //auto taskSyn = std::bind(&CameraHandle::sdkDevSnapSyn, this, hDevice, this->channel);
auto taskSyn = [this, hDevice]() {
sdkDevSnapSyn(hDevice, this->channel);
};
auto taskRunnable = new TaskRunnable(taskSyn, hDevice, this->channel, RunFunction::SdkDevSnapSyn); auto taskRunnable = new TaskRunnable(taskSyn, hDevice, this->channel, RunFunction::SdkDevSnapSyn);
threadPool->start(taskRunnable); threadPool->start(taskRunnable);
} }
QString CameraHandle::getSSn(){ QString CameraHandle::getSSn(){
return sSn; return sSn;
...@@ -271,10 +279,6 @@ void CameraHandle::setMediaHandle(int mediaHandle){ ...@@ -271,10 +279,6 @@ void CameraHandle::setMediaHandle(int mediaHandle){
this->mediaHandle=mediaHandle; this->mediaHandle=mediaHandle;
} }
void CameraHandle::setCurrentFace(int currentFace){
std::lock_guard<std::mutex> guard(faceMutex);
this->currentFace=currentFace;
}
std::map<QString, QString>&CameraHandle::getCurrentData(){ std::map<QString, QString>&CameraHandle::getCurrentData(){
return currentData; return currentData;
} }
...@@ -327,6 +331,7 @@ void CameraHandle::sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id, ...@@ -327,6 +331,7 @@ void CameraHandle::sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
} }
int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) { int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
if (stopRequested_) return -1; if (stopRequested_) return -1;
if (!semaphore.tryAcquire()) { if (!semaphore.tryAcquire()) {
qInfo() << "sdkDevSnapSyn:正在执行线程"; qInfo() << "sdkDevSnapSyn:正在执行线程";
return -1; return -1;
...@@ -366,6 +371,7 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) { ...@@ -366,6 +371,7 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
qInfo() << "图像尺寸或通道数不正确,需排查原因"; qInfo() << "图像尺寸或通道数不正确,需排查原因";
return -1; return -1;
} }
qDebug() << "callbackFunction request to: " << sSn;
updateImage(image, currentTime); updateImage(image, currentTime);
} }
...@@ -424,7 +430,7 @@ void CameraHandle::matToBase64(const cv::Mat &image, QByteArray &base64Data) { ...@@ -424,7 +430,7 @@ void CameraHandle::matToBase64(const cv::Mat &image, QByteArray &base64Data) {
base64Data = QByteArray(reinterpret_cast<const char*>(buffer.data()), buffer.size()).toBase64(); base64Data = QByteArray(reinterpret_cast<const char*>(buffer.data()), buffer.size()).toBase64();
} }
void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat &frame, RecognizedInfo& newInfo, void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat &frame, RecognizedInfo& newInfo,
int &result,std::map<int,RecognizedInfo>&exitAndMoMap){ int &result){
if (newInfo.getLicensePlate() != park->getCurrentPlate().getLicensePlate()) { if (newInfo.getLicensePlate() != park->getCurrentPlate().getLicensePlate()) {
int count = 0; int count = 0;
for (auto& info : park->getQueue()) { for (auto& info : park->getQueue()) {
...@@ -432,35 +438,41 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma ...@@ -432,35 +438,41 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
count++; count++;
} }
} }
qDebug() << "最新车牌" << newInfo.getLicensePlate() << "区域当前车牌" << park->getCurrentPlate().getLicensePlate();
qDebug() << "不同的区域:" << park->getSpaceIndex() << ",数量:" << count; qDebug() << "不同的区域:" << park->getSpaceIndex() << ",数量:" << count;
if (count >= 3) { if (count>= 3) {
//第一次进场 当前车牌就是进来这个,老车牌就是空 //第一次进场 当前车牌就是进来这个,老车牌就是空
if(park->getCurrentPlate().getLicensePlate().length()<=0){ if(park->getCurrentPlate().getLicensePlate().length()<=0){
//进场 //进场
park->setCurrentPlate(newInfo); park->setCurrentPlate(newInfo);
result=Mobilization; result=Mobilization;
}else { }else {
//当前为空, //当前为空,
if(newInfo.getLicensePlate().length()<=0){ if(newInfo.getLicensePlate().length()<=0){
HumanDetection &humanDetection=HumanDetection::getInstance(); HumanDetection &humanDetection=HumanDetection::getInstance();
int car_size = humanDetection.findHuManCar(frame,0x01,detector); std::vector<vides_data::ParkingArea> currentPlates;
int car_size = humanDetection.findHuManCar(frame,0x01,detector,currentPlates);
qDebug()<<sSn<<":"<<"当前车形数量:"<<car_size; qDebug()<<sSn<<":"<<"当前车形数量:"<<car_size;
if(car_size<=0){
//出场 if (car_size <= 0 ) {
qDebug() << sSn<<"区域:"<<park->getSpaceIndex() << ": 出场:";
//如果有车辆检测到并且不在停车区域内部,视为出场
park->setCurrentPlate(newInfo); park->setCurrentPlate(newInfo);
result=Exit; result = Exit;
}else{ }else {
park-> removeNoQueue(); // 没有车辆或车辆在停车区域内部,移除队列
qDebug()<<sSn<<":"<<"no出场:"<<car_size; park->removeNoQueue();
qDebug() << sSn << ": no出场:" << car_size;
} }
}else{ }else{
qDebug()<<sSn<<":"<<"出场:"<<2;
qDebug()<<sSn<<":"<<"老车出场:"<<park->getCurrentPlate().getLicensePlate();
qDebug()<<sSn<<":"<<"老车出场:"<<park->getCurrentPlate().getLicensePlate(); qDebug()<<sSn<<":"<<"老车出场:"<<park->getCurrentPlate().getLicensePlate();
qDebug()<<sSn<<":"<<"新车入场:"<<newInfo.getLicensePlate();
//当前不为空,新车,新车入场,老车出场 //当前不为空,新车,新车入场,老车出场
exitAndMoMap[Exit]=park->getCurrentPlate(); //exitAndMoMap[Exit]=park->getCurrentPlate();
exitAndMoMap[Mobilization]=newInfo; //exitAndMoMap[Mobilization]=newInfo;
park->setCurrentPlate(newInfo); park->setCurrentPlate(newInfo);
result=ExitAndMobilization; result=ExitAndMobilization;
} }
...@@ -470,17 +482,58 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma ...@@ -470,17 +482,58 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
} }
// Build one LicensePlate record per configured parking area, all sharing the
// same snapshot image and timestamp, and append them to newPlate.
void CameraHandle::batchRegionalPushLicensePlate(QByteArray &imgs,qint64 currentTime,vides_data::requestLicensePlate &newPlate){
    for (const auto &entry : parkMap) {
        ParkingSpaceInfo *areaInfo = entry.second; // value of the map entry (ParkingSpaceInfo pointer)
        vides_data::LicensePlate plateRecord;
        plateRecord.time = currentTime;
        plateRecord.img = imgs;
        plateRecord.areaLocation = areaInfo->getArea();
        newPlate.plates.push_back(plateRecord);
    }
}
// For every parking area, produce a copy of `source` in which everything outside
// that area's quadrilateral is black, keyed by the area id in `maskFrame`.
void CameraHandle::matToAreaMask(const cv::Mat &source, std::map<int, cv::Mat> &maskFrame) {
    for (const auto &entry : parkMap) {
        const int areaId = entry.first;
        ParkingSpaceInfo *space = entry.second;
        // Quadrilateral corners, converted from float to integer pixel coordinates.
        std::vector<std::vector<cv::Point>> polygons = { {
            cv::Point(static_cast<int>(space->getArea().topLeftCornerX), static_cast<int>(space->getArea().topLeftCornerY)),
            cv::Point(static_cast<int>(space->getArea().topRightCornerX), static_cast<int>(space->getArea().topRightCornerY)),
            cv::Point(static_cast<int>(space->getArea().bottomRightCornerX), static_cast<int>(space->getArea().bottomRightCornerY)),
            cv::Point(static_cast<int>(space->getArea().bottomLeftCornerX), static_cast<int>(space->getArea().bottomLeftCornerY))
        } };
        // Black single-channel mask, white (255) inside the polygon.
        cv::Mat mask = cv::Mat::zeros(source.size(), CV_8UC1);
        cv::fillPoly(mask, polygons, cv::Scalar(255));
        // Copy only the masked pixels, preserving the original frame size/type.
        cv::Mat maskedSource = cv::Mat::zeros(source.size(), source.type());
        source.copyTo(maskedSource, mask);
        maskFrame[areaId] = maskedSource;
    }
}
// Report whether two points differ: true when their Euclidean distance exceeds
// a tiny tolerance (for integer QPoints this is equivalent to inequality).
bool CameraHandle::isChanged(const QPoint& newInfo, const QPoint& current) {
    const double epsilon = 1e-6; // tolerance below which the points count as equal
    const double dx = newInfo.x() - current.x();
    const double dy = newInfo.y() - current.y();
    return std::hypot(dx, dy) > epsilon;
}
void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
Common & instace= Common::getInstance(); Common & instace= Common::getInstance();
qDebug()<<"=============================>"; qDebug()<<"=============================>";
static int i=0;
printf("updateImage%d次\n", ++i);
faceCount.fetch_add(1, std::memory_order_relaxed);
qDebug()<<"faceCount==>"<<faceCount.load(std::memory_order_relaxed);
int width = frame.cols; // 获取图像宽度 int width = frame.cols; // 获取图像宽度
int height = frame.rows; // 获取图像高度 int height = frame.rows; // 获取图像高度
qDebug()<<"frame 宽度:"<<width<<"frame 高度:"<<height; qDebug()<<"frame 宽度:"<<width<<"frame 高度:"<<height;
...@@ -488,43 +541,42 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -488,43 +541,42 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
HumanDetection &humanDetection=HumanDetection::getInstance(); HumanDetection &humanDetection=HumanDetection::getInstance();
LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance(); LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance();
static int ii=0;
printf("updateImage retryCount: %d \n", ++ii); std::map<QString,vides_data::requestFaceReconition> mapFaces;
//faceRecognition.search(frame,imageHandleList,names);
QByteArray imgs; QByteArray imgs;
this->matToBase64(frame, imgs); this->matToBase64(frame, imgs);
HttpService httpService(httpUrl); HttpService httpService(httpUrl);
int faSize =0; int faSize =0;
std::vector<vides_data::ParkingArea> currentPlates;
int uniforms=0x00;
if ((algorithmPermissions & 0x01<<2) != 0) {
uniforms=humanDetection.findHuManCar(frame,0x02,detector,currentPlates);
}
if ((algorithmPermissions & 0x01<<1) != 0) { if ((algorithmPermissions & 0x01<<1) != 0) {
faSize=humanDetection.findHuManCar(frame,0x00,detector); faSize=humanDetection.findHuManCar(frame,0x00,detector,currentPlates);
QPoint point_info(faSize,uniforms);
if(currentFace!= faSize){ if(isChanged(point_info,faceMapWorker)){
if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){ if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){
vides_data::response* resp=httpService.httpPostFacePopulation(imgs,faSize,sSn,currentTime); int worker=0x00;
if ((algorithmPermissions & 0x01<<2) != 0) {
worker = (faSize - uniforms > 0) ? (faSize - uniforms) : 0;
}
vides_data::response* resp=httpService.httpPostFacePopulation(imgs,faSize,worker,sSn,currentTime);
if (resp->code!= 0) { if (resp->code!= 0) {
qInfo()<<"人数变化推送信息推送失败"; qInfo()<<"人数变化推送信息推送失败";
} }
instace.deleteObj(resp); instace.deleteObj(resp);
currentFace = faSize;
} faceMapWorker.setX(faSize);
} faceMapWorker.setY(uniforms);
} }
if ((algorithmPermissions & 0x01<<2) != 0) {
int uniforms=humanDetection.findHuManCar(frame,0x02,detector);
if(uniforms>0 ){
if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){
httpService.setHttpUrl(httpUrl);
vides_data::response* resp=httpService.httpPostUniforms(imgs,uniforms,sSn,currentTime);
if (resp->code!= 0) {
qInfo()<<"推送未穿工服人数失败";
} }
instace.deleteObj(resp);
} }
}
}
if(faSize>0 ){ if(faSize>0 ){
qDebug() << "faceRecognition.doesItExistEmployee Current thread ID: " << QThread::currentThreadId(); qDebug() << "faceRecognition.doesItExistEmployee Current thread ID: " << QThread::currentThreadId()<<sSn;
std::list<vides_data::faceRecognitionResult>faces; std::list<vides_data::faceRecognitionResult>faces;
faceRecognition.doesItExistEmployee(frame,faces); faceRecognition.doesItExistEmployee(frame,faces);
if (faces.size()>0) { if (faces.size()>0) {
...@@ -546,6 +598,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -546,6 +598,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
httpService.setHttpUrl(httpUrl); httpService.setHttpUrl(httpUrl);
vides_data::response* resp = httpService.httpPostFaceReconition(faceReconition); vides_data::response* resp = httpService.httpPostFaceReconition(faceReconition);
mapFaces.insert(std::make_pair( face.id, faceReconition));
if (resp->code!= 0) { if (resp->code!= 0) {
qInfo()<<"识别人code"<<resp->code; qInfo()<<"识别人code"<<resp->code;
qInfo()<<"识别人msg"<<resp->msg; qInfo()<<"识别人msg"<<resp->msg;
...@@ -555,13 +608,27 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -555,13 +608,27 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
} }
} }
} }
if ((algorithmPermissions & 0x01<<2) != 0) {
if(uniforms>0 ){
//未穿工服的人数
std::list<QString> outUniforms;
faceUniformOverlap(mapFaces,currentPlates,outUniforms);
for(auto strUniform:outUniforms){
httpService.setHttpUrl(httpUrl);
vides_data::response* resp=httpService.httpPostUniforms(imgs,strUniform, sSn,currentTime);
if (resp->code!= 0) {
qInfo()<<"推送未穿工服人数失败";
}
instace.deleteObj(resp);
}
}
}
//关闭车牌识别 //关闭车牌识别
if ((algorithmPermissions & 0x01) == 0) { if ((algorithmPermissions & 0x01) == 0) {
return ; return ;
} }
QString lpNumber; QString lpNumber;
vides_data::requestLicensePlate plate;
plate.sn=sSn;
// return ; // return ;
if(image_save==1){ if(image_save==1){
...@@ -575,24 +642,77 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -575,24 +642,77 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
} }
} }
std::map<int,cv::Mat>areaMat;
matToAreaMask(frame,areaMat);
if(image_save==2){
for (auto it = areaMat.begin(); it != areaMat.end(); ++it) {
int key = it->first;
cv::Mat areaMat = it->second;
QString fileName= instace.getVideoDownload().append(QString::number( key)+".jpg");
bool success = cv::imwrite(fileName.toStdString(), areaMat);
if (success) {
qDebug() << "图片已成功保存至:" << fileName;
} else {
qDebug() << "图片保存失败!";
}
}
}
licensePlateRecogn.licensePlateNumber(frame, lpNumber,plate,currentTime,ctx);
std::map<int,RecognizedInfo>exitMoMap;
vides_data::requestLicensePlate newPlate; vides_data::requestLicensePlate newPlate;
newPlate.sn=sSn; newPlate.sn=sSn;
qDebug()<<QString("sn==>%1,识别的车牌信息是:%2").arg(sSn).arg(lpNumber);
std::list<vides_data::LicensePlate>ps=plate.plates; uint64_t countValue = faceCount.load(std::memory_order_relaxed);
int res =-1;
this->matToBase64(frame, imgs); if(countValue==0 ){
vides_data::requestLicensePlate initPlate;
initPlate.sn=sSn;
licensePlateRecogn.licensePlateNumber(frame, lpNumber,initPlate,currentTime,ctx);
if(initPlate.plates.size()==0){
batchRegionalPushLicensePlate(imgs,currentTime,initPlate);
if(initPlate.plates.size()>0){
licensePlateRecognitionResults(initPlate);
}
}
}
faceCount.fetch_add(1, std::memory_order_relaxed);
qDebug()<<"faceCount==>"<<faceCount.load(std::memory_order_relaxed);
for (auto it = areaMat.begin(); it != areaMat.end(); ++it) {
int key = it->first;
cv::Mat areaMat = it->second;
std::map<int, ParkingSpaceInfo*>::iterator parkAreaMap = parkMap.find(key);
ParkingSpaceInfo* value=nullptr;
if (parkAreaMap != parkMap.end()) {
value = parkAreaMap->second; // 成功找到,获取
} else {
qDebug()<<sSn<<"==>区域不存在:"<<key;
continue;
}
vides_data::requestLicensePlate resultPlate;
resultPlate.sn=sSn;
licensePlateRecogn.licensePlateNumber(areaMat, lpNumber,resultPlate,currentTime,ctx);
std::list<vides_data::LicensePlate>ps =resultPlate.plates;
qDebug()<<QString("sn==>%1,区域:%2识别的车牌信息是:%3").arg(sSn).arg(key).
arg(lpNumber);
if(ps.size()==0){ if(ps.size()==0){
for (auto it = parkMap.begin(); it != parkMap.end(); ++it) { int res=-1;
ParkingSpaceInfo* value = it->second; // 获取值 if(value==nullptr){
continue;
}
if(value->getQueue().size()>=10) { if(value->getQueue().size()>=10) {
value->removeQueue(); value->removeQueue();
} }
RecognizedInfo recognizedInfo(lpNumber,QDateTime::currentSecsSinceEpoch(),"未知"); RecognizedInfo recognizedInfo("",QDateTime::currentSecsSinceEpoch(),"未知");
value->addQueue(recognizedInfo); value->addQueue(recognizedInfo);
this->checkAndUpdateCurrentPlate(value,frame,recognizedInfo,res,exitMoMap); // 使用value指向的ParkingSpaceInfo对象
this->checkAndUpdateCurrentPlate(value,areaMat,recognizedInfo,res);
if (res == Exit || res == Mobilization) { if (res == Exit || res == Mobilization) {
vides_data::LicensePlate current; vides_data::LicensePlate current;
current.areaLocation=value->getArea(); current.areaLocation=value->getArea();
...@@ -601,134 +721,93 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -601,134 +721,93 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
current.new_plate=recognizedInfo.getLicensePlate(); current.new_plate=recognizedInfo.getLicensePlate();
current.time=recognizedInfo.getRecognizeTime(); current.time=recognizedInfo.getRecognizeTime();
newPlate.plates.push_back(std::move(current)); newPlate.plates.push_back(std::move(current));
} qDebug()<<QString("当前进入ps.size()==0是当前校验返回结果是:%1").arg(res);
} }
}else{ }else{
std::unordered_map<int, vides_data::LicensePlate> indexToLicensePlate; int res =-1;
for (auto it_ps = ps.begin(); it_ps != ps.end(); ++it_ps) { if(value==nullptr){
vides_data::LicensePlate& currentPlate = *it_ps; continue;
ParkingSpaceInfo newcurrentPlate; }
newcurrentPlate.setArea(currentPlate.recognition); vides_data::LicensePlate maxPlate;
int index = this->findPointRegion(newcurrentPlate); licensePlateRecogn.filterLicensePlateConfidenceMax(resultPlate,maxPlate);
qDebug()<<sSn<<"==>识别的区域:"<<index;
indexToLicensePlate[index] = currentPlate;
}
for (auto it = parkMap.begin(); it != parkMap.end(); ++it) {
int key = it->first;
ParkingSpaceInfo* value = it->second; // 获取值
if (indexToLicensePlate.count(key) > 0) {
vides_data::LicensePlate recognition= indexToLicensePlate.at(key);
RecognizedInfo recognizedInfo; RecognizedInfo recognizedInfo;
if (recognition.new_color=="蓝牌" && recognition.new_plate.length() != 7) { if (maxPlate.new_color=="蓝牌" && maxPlate.new_plate.length() != 7) {
qDebug()<<sSn<<"==>蓝牌车牌号:"<<recognition.new_plate<<"===>recognition.new_plate.length():"<<recognition.new_plate.length(); qDebug()<<sSn<<"==>蓝牌车牌号:"<<maxPlate.new_plate<<"===>recognition.new_plate.length():"<<maxPlate.new_plate.length();
continue; continue;
} else if (recognition.new_color=="绿牌新能源" && recognition.new_plate.length() != 8) { } else if (maxPlate.new_color=="绿牌新能源" && maxPlate.new_plate.length() != 8) {
qDebug()<<sSn<<"==>绿牌车牌号:"<<recognition.new_plate<<"===>recognition.new_plate.length():"<<recognition.new_plate.length(); qDebug()<<sSn<<"==>绿牌车牌号:"<<maxPlate.new_plate<<"===>recognition.new_plate.length():"<<maxPlate.new_plate.length();
continue; continue;
} else if (recognition.new_plate.length() < 7) { } else if (maxPlate.new_plate.length() < 7) {
qDebug()<<sSn<<"==>非绿牌蓝牌车牌号:"<<recognition.new_plate<<"===>recognition.new_plate.length():"<<recognition.new_plate.length(); qDebug()<<sSn<<"==>非绿牌蓝牌车牌号:"<<maxPlate.new_plate<<"===>recognition.new_plate.length():"<<maxPlate.new_plate.length();
continue; continue;
} }
if(recognition.text_confidence>=instace.getCarConfidenceMax()){ if(maxPlate.text_confidence>=instace.getCarConfidenceMax()){
if(value->getQueue().size()>=7 && value->getQueue().size()<=10) { if(value->getQueue().size()>=7 && value->getQueue().size()<=10) {
for (int i = 0; i < 3; ++i) { for (int i = 0; i < 3; ++i) {
value->removeQueue(); value->removeQueue();
} }
} }
for (int var = 0; var < 3; ++var) { for (int var = 0; var < 3; ++var) {
RecognizedInfo info(recognition.new_plate,recognition.time,recognition.new_color); RecognizedInfo info(maxPlate.new_plate,maxPlate.time,maxPlate.new_color);
value->addQueue(info); value->addQueue(info);
recognizedInfo=std::move(info); recognizedInfo=std::move(info);
} }
this->checkAndUpdateCurrentPlate(value,frame,recognizedInfo,res,exitMoMap); this->checkAndUpdateCurrentPlate(value,areaMat,recognizedInfo,res);
} }
if(recognition.text_confidence<=instace.getCarConfidenceMin()){ if(maxPlate.text_confidence<=instace.getCarConfidenceMin()){
qDebug()<<sSn<<"==>recognition.text_confidence<=instace.getCarConfidenceMin"<<instace.getCarConfidenceMin(); qDebug()<<sSn<<"==>recognition.text_confidence<=instace.getCarConfidenceMin"<<instace.getCarConfidenceMin();
continue; continue;
} }
if(recognition.text_confidence>instace.getCarConfidenceMin() if(maxPlate.text_confidence>instace.getCarConfidenceMin()
&& recognition.text_confidence<instace.getCarConfidenceMax()) && maxPlate.text_confidence<instace.getCarConfidenceMax())
{ {
if(value->getQueue().size()>=10) { if(value->getQueue().size()>=10) {
value->removeQueue(); value->removeQueue();
} }
RecognizedInfo info(recognition.new_plate,recognition.time,recognition.new_color); RecognizedInfo info(maxPlate.new_plate,maxPlate.time,maxPlate.new_color);
value->addQueue(info); value->addQueue(info);
recognizedInfo=std::move(info); recognizedInfo=std::move(info);
this->checkAndUpdateCurrentPlate(value,frame,recognizedInfo,res,exitMoMap); this->checkAndUpdateCurrentPlate(value,areaMat,recognizedInfo,res);
} }
qDebug()<<sSn<<"==>checkAndUpdateCurrentPlate结果是"<<res; qDebug()<<sSn<<"==>checkAndUpdateCurrentPlate结果是"<<res;
if (res == Exit || res == Mobilization) { if (res == Exit || res == Mobilization) {
recognition.areaLocation=value->getArea(); maxPlate.areaLocation=value->getArea();
recognition.img=imgs; maxPlate.img=imgs;
recognition.new_color=recognizedInfo.getColor(); maxPlate.new_color=recognizedInfo.getColor();
newPlate.plates.push_back(std::move(recognition)); newPlate.plates.push_back(std::move(maxPlate));
qDebug()<<QString("当前进入ps.size()>0 --> res == Exit || res == Mobilization 是当前校验返回结果是:%1").arg(res);
} }
if(res==ExitAndMobilization){ if(res==ExitAndMobilization){
if(exitMoMap.size()>0){ maxPlate.areaLocation=value->getArea();
recognition.areaLocation=value->getArea(); maxPlate.img=imgs;
recognition.img=imgs; maxPlate.new_color=recognizedInfo.getColor();
recognition.new_color=recognizedInfo.getColor(); newPlate.plates.push_back(std::move(maxPlate));
newPlate.plates.push_back(std::move(recognition));
RecognizedInfo exitInfo=exitMoMap[Exit];
vides_data::LicensePlate oldInfo;
oldInfo.areaLocation=value->getArea();
oldInfo.img=imgs;
oldInfo.new_color=exitInfo.getColor();
oldInfo.new_plate=exitInfo.getLicensePlate();
oldInfo.time=exitInfo.getRecognizeTime();
newPlate.plates.push_back(std::move(oldInfo));
}
}
}else{
if(value->getQueue().size()>=10) {
value->removeQueue();
}
RecognizedInfo recognizedInfo("", QDateTime::currentSecsSinceEpoch(), "未知");
value->addQueue(recognizedInfo);
int res;
this->checkAndUpdateCurrentPlate(value, frame, recognizedInfo, res,exitMoMap);
if (res == Exit || res == Mobilization) {
vides_data::LicensePlate current;
current.areaLocation = value->getArea();
current.img = imgs;
current.new_color = recognizedInfo.getColor();
current.new_plate = recognizedInfo.getLicensePlate();
current.time = recognizedInfo.getRecognizeTime();
current.recognition=value->getArea();
newPlate.plates.push_back(std::move(current));
}
if(res==ExitAndMobilization){
vides_data::LicensePlate current;
current.areaLocation = value->getArea();
current.img = imgs;
current.new_color = recognizedInfo.getColor();
current.new_plate = recognizedInfo.getLicensePlate();
current.time = recognizedInfo.getRecognizeTime();
newPlate.plates.push_back(std::move(current)); // RecognizedInfo exitInfo=exitMoMap[Exit];
RecognizedInfo exitInfo=exitMoMap[Exit]; // vides_data::LicensePlate oldInfo;
vides_data::LicensePlate oldInfo; // oldInfo.areaLocation=value->getArea();
oldInfo.areaLocation=value->getArea(); // //oldInfo.img=imgs;
oldInfo.img=imgs; // oldInfo.new_color=exitInfo.getColor();
oldInfo.new_color=exitInfo.getColor(); // oldInfo.new_plate=exitInfo.getLicensePlate();
oldInfo.new_plate=exitInfo.getLicensePlate(); // oldInfo.time=exitInfo.getRecognizeTime();
oldInfo.time=exitInfo.getRecognizeTime(); // newPlate.plates.push_back(std::move(oldInfo));
newPlate.plates.push_back(std::move(oldInfo));
}
} }
} }
} }
qDebug()<<QString("%1==>当前车牌数量:%2").arg(sSn).arg(ps.size());
qDebug()<<QString("%1==>当前车牌数量:%2").arg(sSn).arg(newPlate.plates.size());
if(newPlate.plates.size()>0){ if(newPlate.plates.size()>0){
licensePlateRecognitionResults(newPlate); foreach (auto var, newPlate.plates) {
foreach (auto var, plate.plates) {
qDebug()<<QString("sn:%1 =>识别的车牌号是:%2").arg(sSn).arg(var.new_plate); qDebug()<<QString("sn:%1 =>识别的车牌号是:%2").arg(sSn).arg(var.new_plate);
} }
licensePlateRecognitionResults(newPlate);
} }
} }
void CameraHandle::findIp(QString &ip){ void CameraHandle::findIp(QString &ip){
ip=QString::fromStdString(loginParam->sDevId); ip=QString::fromStdString(loginParam->sDevId);
...@@ -772,50 +851,58 @@ void CameraHandle::pushRecordToCloud(int id, int recognitionType, QString ossUrl ...@@ -772,50 +851,58 @@ void CameraHandle::pushRecordToCloud(int id, int recognitionType, QString ossUrl
// Push the recognized license plates to the cloud service and release the
// HTTP response object. Failures are logged only; there is no retry.
// @param location batch of plate records (sn + per-area plates) to upload.
void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlate &location){
    Common & instace = Common::getInstance();
    HttpService httpService(httpUrl);
    std::list<vides_data::responseRecognitionData> result;
    vides_data::response* resp = httpService.httpLicensePlateRecognition(location, result);
    if (resp->code != 0) {
        qInfo()<<"licensePlateRecognitionResults:车牌识别结果失败";
    }
    instace.deleteObj(resp);
}
// Read the device's current Wi-Fi configuration synchronously and parse it
// into `cfg` on success; on failure only an error code is printed.
void CameraHandle::printWifi(XSDK_HANDLE hDevice,XSDK_CFG::NetWork_Wifi &cfg){
    char replyBuffer[4000] = { 0 };
    int replyLen = sizeof(replyBuffer);
    int ret = XSDK_DevGetSysConfigSyn(hDevice, JK_NetWork_Wifi, replyBuffer, &replyLen, 4000, EXCMD_CONFIG_GET);
    qDebug() << replyBuffer;
    if (ret < 0) {
        printf("Failed to get Wi-Fi configuration. Error code: %d\n", ret);
    } else {
        cfg.Parse(replyBuffer);
    }
}
// Update the device's Wi-Fi settings: fetch the current config, overwrite the
// SSID/password, enable Wi-Fi with key type 1, then write it back synchronously.
void CameraHandle::sdkWifi(QString &pwd,QString &ssid){
    XSDK_CFG::NetWork_Wifi wifiCfg;
    printWifi(hDevice, wifiCfg);

    QByteArray pwdBytes = pwd.toUtf8();
    wifiCfg.Keys.SetValue(pwdBytes.data());
    QByteArray ssidBytes = ssid.toUtf8();
    wifiCfg.SSID.SetValue(ssidBytes.data());
    wifiCfg.Enable.SetValue(true);
    wifiCfg.KeyType.SetValue(1);

    const char* cfgText = wifiCfg.ToString();
    char replyBuffer[512] = { 0 };
    int replyLen = sizeof(replyBuffer);
    int ret = XSDK_DevSetSysConfigSyn(hDevice, JK_NetWork_Wifi, cfgText, strlen(cfgText), replyBuffer, &replyLen, 5000, EXCMD_CONFIG_SET);
    if (ret < 0) {
        qInfo() << "修改wifi失败";
    }
}
void CameraHandle::sdkDevSystemTimeZoneSyn(QString &time){ void CameraHandle::sdkDevSystemTimeZoneSyn(QString &time){
QByteArray bTime =time.toUtf8(); QByteArray bTime =time.toUtf8();
const char* cTime=bTime.data(); const char* cTime=bTime.data();
...@@ -929,9 +1016,111 @@ double CameraHandle::calculateIntersectionArea(const QPolygonF &polygon1, const ...@@ -929,9 +1016,111 @@ double CameraHandle::calculateIntersectionArea(const QPolygonF &polygon1, const
double CameraHandle::ccw(const QPointF& a, const QPointF& b, const QPointF& c) { double CameraHandle::ccw(const QPointF& a, const QPointF& b, const QPointF& c) {
return (b.x() - a.x()) * (c.y() - a.y()) - (c.x() - a.x()) * (b.y() - a.y()); return (b.x() - a.x()) * (c.y() - a.y()) - (c.x() - a.x()) * (b.y() - a.y());
} }
int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea){ //顺时针
//左下、右下、右上、左上。 bool CameraHandle::isClockwise(const std::vector<cv::Point2f>& polygon) {
double maxIntersectionArea = 0.0; float sum = 0.0;
for (size_t i = 0; i < polygon.size(); ++i) {
cv::Point2f current = polygon[i];
cv::Point2f next = polygon[(i + 1) % polygon.size()];
sum += (next.x - current.x) * (next.y + current.y);
}
return sum > 0;
}
// Collects into outUniforms the ids of every face in mapFaces whose detection
// quad overlaps (intersection area > epsilon) at least one uniform area.
// The uniform quads do not depend on the face being tested, so they are built
// and normalized to clockwise order exactly once instead of once per face
// (the original rebuilt all of them inside the face loop).
void CameraHandle::faceUniformOverlap(std::map<QString, vides_data::requestFaceReconition>& mapFaces,
                                      std::vector<vides_data::ParkingArea>& uniforms,
                                      std::list<QString>& outUniforms) {
    const float epsilon = 1e-5;
    // Precompute clockwise-normalized uniform polygons (loop-invariant work).
    std::vector<std::vector<cv::Point2f>> uniformPolygons;
    uniformPolygons.reserve(uniforms.size());
    for (const auto &u : uniforms) {
        std::vector<cv::Point2f> pts = {
            cv::Point2f(u.topLeftCornerX, u.topLeftCornerY),
            cv::Point2f(u.topRightCornerX, u.topRightCornerY),
            cv::Point2f(u.bottomRightCornerX, u.bottomRightCornerY),
            cv::Point2f(u.bottomLeftCornerX, u.bottomLeftCornerY)
        };
        if (!isClockwise(pts)) {
            std::reverse(pts.begin(), pts.end());
        }
        uniformPolygons.push_back(std::move(pts));
    }

    for (const auto &entry : mapFaces) {
        const QString &id = entry.first;                               // person id
        const vides_data::requestFaceReconition &value = entry.second; // no per-face copy
        std::vector<cv::Point2f> faceAreaPoints = {
            cv::Point2f(value.area.top_left_corner_x, value.area.top_left_corner_y),
            cv::Point2f(value.area.top_right_corner_x, value.area.top_right_corner_y),
            cv::Point2f(value.area.bottom_right_corner_x, value.area.bottom_right_corner_y),
            cv::Point2f(value.area.bottom_left_corner_x, value.area.bottom_left_corner_y)
        };
        if (!isClockwise(faceAreaPoints)) {
            std::reverse(faceAreaPoints.begin(), faceAreaPoints.end());
        }
        // Only membership matters ("does any uniform overlap this face?"),
        // which is equivalent to the original max-over-all-uniforms > epsilon
        // test, so we can stop at the first sufficient intersection.
        for (const auto &uniformPts : uniformPolygons) {
            std::vector<cv::Point2f> intersection;
            float intersectionArea = cv::intersectConvexConvex(faceAreaPoints, uniformPts, intersection, true);
            if (intersectionArea > epsilon) {
                outUniforms.push_back(id);
                break;
            }
        }
    }
}
// Reports whether the parking space overlaps any of the given plate areas.
// Returns true as soon as one plate quad intersects the space quad with a
// non-degenerate area; false when every plate is disjoint from the space.
bool CameraHandle::isAnyOverlap(ParkingSpaceInfo *parkArea, std::vector<vides_data::ParkingArea> &currentPlates) {
    const float epsilon = 1e-5;

    // Quad of the parking space, normalized to clockwise vertex order as
    // required by cv::intersectConvexConvex.
    std::vector<cv::Point2f> spaceQuad = {
        cv::Point2f(parkArea->getArea().topLeftCornerX, parkArea->getArea().topLeftCornerY),
        cv::Point2f(parkArea->getArea().topRightCornerX, parkArea->getArea().topRightCornerY),
        cv::Point2f(parkArea->getArea().bottomRightCornerX, parkArea->getArea().bottomRightCornerY),
        cv::Point2f(parkArea->getArea().bottomLeftCornerX, parkArea->getArea().bottomLeftCornerY)
    };
    if (!isClockwise(spaceQuad)) {
        std::reverse(spaceQuad.begin(), spaceQuad.end());
    }

    for (const auto &plate : currentPlates) {
        // Quad of the current plate, normalized the same way.
        std::vector<cv::Point2f> plateQuad = {
            cv::Point2f(plate.topLeftCornerX, plate.topLeftCornerY),
            cv::Point2f(plate.topRightCornerX, plate.topRightCornerY),
            cv::Point2f(plate.bottomRightCornerX, plate.bottomRightCornerY),
            cv::Point2f(plate.bottomLeftCornerX, plate.bottomLeftCornerY)
        };
        if (!isClockwise(plateQuad)) {
            std::reverse(plateQuad.begin(), plateQuad.end());
        }

        std::vector<cv::Point2f> overlapRegion;
        float overlapArea = cv::intersectConvexConvex(spaceQuad, plateQuad, overlapRegion, true);
        // A meaningful overlap needs both a non-trivial area and actual
        // intersection vertices.
        if (overlapArea > epsilon && !overlapRegion.empty()) {
            return true;
        }
    }
    return false;
}
int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea) {
float maxIntersectionArea = 0.0;
int areaOfMaxIntersection = -1; int areaOfMaxIntersection = -1;
std::vector<cv::Point2f> currentPolygonPoints = { std::vector<cv::Point2f> currentPolygonPoints = {
cv::Point2f(prakArea.getArea().topLeftCornerX, prakArea.getArea().topLeftCornerY), cv::Point2f(prakArea.getArea().topLeftCornerX, prakArea.getArea().topLeftCornerY),
...@@ -939,10 +1128,16 @@ int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea){ ...@@ -939,10 +1128,16 @@ int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea){
cv::Point2f(prakArea.getArea().bottomRightCornerX, prakArea.getArea().bottomRightCornerY), cv::Point2f(prakArea.getArea().bottomRightCornerX, prakArea.getArea().bottomRightCornerY),
cv::Point2f(prakArea.getArea().bottomLeftCornerX, prakArea.getArea().bottomLeftCornerY) cv::Point2f(prakArea.getArea().bottomLeftCornerX, prakArea.getArea().bottomLeftCornerY)
}; };
if (!isClockwise(currentPolygonPoints)) {
std::reverse(currentPolygonPoints.begin(), currentPolygonPoints.end());
}
qDebug() << "Current Polygon Points:"; qDebug() << "Current Polygon Points:";
for (const auto& point : currentPolygonPoints) { for (const auto& point : currentPolygonPoints) {
qDebug() << "(" << point.x << ", " << point.y << ")"; qDebug() << "(" << point.x << ", " << point.y << ")";
} }
for (ParkingSpaceInfo *info : parkingSpaceInfos) { for (ParkingSpaceInfo *info : parkingSpaceInfos) {
std::vector<cv::Point2f> polygonInfoPoints = { std::vector<cv::Point2f> polygonInfoPoints = {
cv::Point2f(info->getArea().topLeftCornerX, info->getArea().topLeftCornerY), cv::Point2f(info->getArea().topLeftCornerX, info->getArea().topLeftCornerY),
...@@ -950,20 +1145,26 @@ int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea){ ...@@ -950,20 +1145,26 @@ int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea){
cv::Point2f(info->getArea().bottomRightCornerX, info->getArea().bottomRightCornerY), cv::Point2f(info->getArea().bottomRightCornerX, info->getArea().bottomRightCornerY),
cv::Point2f(info->getArea().bottomLeftCornerX, info->getArea().bottomLeftCornerY) cv::Point2f(info->getArea().bottomLeftCornerX, info->getArea().bottomLeftCornerY)
}; };
// 打印 polygonInfoPoints 的值
// Ensure polygonInfoPoints are in clockwise order
if (!isClockwise(polygonInfoPoints)) {
std::reverse(polygonInfoPoints.begin(), polygonInfoPoints.end());
}
qDebug() << "Polygon Info Points for Space " << info->getSpaceIndex() << ":"; qDebug() << "Polygon Info Points for Space " << info->getSpaceIndex() << ":";
for (const auto& point : polygonInfoPoints) { for (const auto& point : polygonInfoPoints) {
qDebug() << "(" << point.x << ", " << point.y << ")"; qDebug() << "(" << point.x << ", " << point.y << ")";
} }
std::vector<cv::Point2f> intersection; std::vector<cv::Point2f> intersection;
double intersectionArea = cv::intersectConvexConvex(polygonInfoPoints, currentPolygonPoints, intersection, true); float intersectionArea = cv::intersectConvexConvex(polygonInfoPoints, currentPolygonPoints, intersection, true);
if (intersectionArea>0.0 && intersectionArea > maxIntersectionArea) {
const float epsilon = 1e-5;
if (intersectionArea > epsilon && intersectionArea > maxIntersectionArea) {
maxIntersectionArea = intersectionArea; maxIntersectionArea = intersectionArea;
areaOfMaxIntersection = info->getSpaceIndex(); areaOfMaxIntersection = info->getSpaceIndex();
} }
} }
return areaOfMaxIntersection; return areaOfMaxIntersection;
} }
......
...@@ -11,6 +11,7 @@ ...@@ -11,6 +11,7 @@
#include "Json_Header/System_TimeZone.h" #include "Json_Header/System_TimeZone.h"
#include "Json_Header/RecordCfg.h" #include "Json_Header/RecordCfg.h"
#include "Json_Header/NetWork_SPVMN.h" #include "Json_Header/NetWork_SPVMN.h"
#include "Json_Header/NetWork_Wifi.h"
#include "Json_Header/SystemInfo.h" #include "Json_Header/SystemInfo.h"
#include "Json_Header/OPMachine.h" #include "Json_Header/OPMachine.h"
#include "mainwindow.h" #include "mainwindow.h"
...@@ -40,8 +41,9 @@ enum CAR_INFORMATION { ...@@ -40,8 +41,9 @@ enum CAR_INFORMATION {
class CameraHandle: public QObject { class CameraHandle: public QObject {
Q_OBJECT Q_OBJECT
public: public:
CameraHandle(QString &url,QString &httpUrl,QString &sSn, int &channel,const QString &modelPaths, CameraHandle(QString &url,QString &httpUrl,QString &sSn, int &channel,
float carConfidence,int imageSave); const QString &modelPaths,
float carConfidence,float carShapeConfidence, int imageSave);
CameraHandle(); CameraHandle();
~CameraHandle(); ~CameraHandle();
int sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout); int sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
...@@ -54,19 +56,30 @@ public: ...@@ -54,19 +56,30 @@ public:
void clearCameraHandle(); void clearCameraHandle();
void initAlgorithmParameter(float &height_reference);
// void rebindTimer(int hDevice); // void rebindTimer(int hDevice);
void initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 face_frequency); void initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 face_frequency);
void updateImage(const cv::Mat & frame,qint64 currentTime); void updateImage(const cv::Mat & frame,qint64 currentTime);
void matToBase64(const cv::Mat &image, QByteArray &base64Data); void matToBase64(const cv::Mat &image, QByteArray &base64Data);
//把原始图片转换成不同区域的掩码
void matToAreaMask(const cv::Mat &source,std::map<int,cv::Mat> &maskFrame);
int callbackFunction(XSDK_HANDLE hObject,QString &szString); int callbackFunction(XSDK_HANDLE hObject,QString &szString);
void checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat & frame,RecognizedInfo& newInfo,int &result,std::map<int,RecognizedInfo>&exitAndMoMap);
void checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat & frame,RecognizedInfo& newInfo,int &result);
void licensePlateRecognitionResults(vides_data::requestLicensePlate &location); void licensePlateRecognitionResults(vides_data::requestLicensePlate &location);
void sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel); void sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel);
void printWifi(XSDK_HANDLE hDevice,XSDK_CFG::NetWork_Wifi &cfg);
//设置相机连接的wifi
void sdkWifi(QString &pwd,QString &ssid);
//时间设置 //时间设置
void sdkDevSystemTimeZoneSyn(QString &time); void sdkDevSystemTimeZoneSyn(QString &time);
//录像设置 //录像设置
...@@ -85,13 +98,18 @@ public: ...@@ -85,13 +98,18 @@ public:
void sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id, void sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
QString startTimer,QString endTime); QString startTimer,QString endTime);
void batchRegionalPushLicensePlate(QByteArray &imgs,qint64 currentTime,vides_data::requestLicensePlate &newPlate);
void faceUniformOverlap(std::map<QString,vides_data::requestFaceReconition>&mapFaces,
std::vector<vides_data::ParkingArea> &uniforms,
std::list<QString>&outUniforms);
bool isClockwise(const std::vector<cv::Point2f>& polygon);
QString getSSn(); QString getSSn();
int getMediaHandle(); int getMediaHandle();
void setMediaHandle(int mediaHandle); void setMediaHandle(int mediaHandle);
void setCurrentFace(int currentFace);
void initAlgorithmPermissions(__uint8_t algorithm); void initAlgorithmPermissions(__uint8_t algorithm);
void initParkingSpaceInfo(const std::list<vides_data::responseArea>&areas); void initParkingSpaceInfo(const std::list<vides_data::responseArea>&areas);
...@@ -103,6 +121,7 @@ public: ...@@ -103,6 +121,7 @@ public:
std::map<QString, QString>&getCurrentData(); std::map<QString, QString>&getCurrentData();
bool isChanged(const QPoint& newInfo, const QPoint& current);
// 检查点是否在多边形内 // 检查点是否在多边形内
bool polygonsOverlap(ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2); bool polygonsOverlap(ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2);
// 计算两个多边形的交集面积 // 计算两个多边形的交集面积
...@@ -110,6 +129,8 @@ public: ...@@ -110,6 +129,8 @@ public:
double ccw(const QPointF& a, const QPointF& b, const QPointF& c); double ccw(const QPointF& a, const QPointF& b, const QPointF& c);
void getCurrentFrame(std::vector<uchar> &buffer); void getCurrentFrame(std::vector<uchar> &buffer);
bool isAnyOverlap(ParkingSpaceInfo *parkArea,std::vector<vides_data::ParkingArea> &currentPlates);
int findPointRegion(ParkingSpaceInfo &prakArea); int findPointRegion(ParkingSpaceInfo &prakArea);
int determineArea(ParkingSpaceInfo &prakArea); int determineArea(ParkingSpaceInfo &prakArea);
signals: signals:
...@@ -131,9 +152,6 @@ private : ...@@ -131,9 +152,6 @@ private :
SXSDKLoginParam *loginParam; SXSDKLoginParam *loginParam;
SXMediaFaceImageReq *sxMediaFaceImageReq; SXMediaFaceImageReq *sxMediaFaceImageReq;
std::mutex plateMutex;
std::mutex faceMutex;
QString sSn; QString sSn;
QString url; QString url;
std::map<int, vides_data::responseRecognitionData> videoCurrentData; std::map<int, vides_data::responseRecognitionData> videoCurrentData;
...@@ -144,8 +162,9 @@ private : ...@@ -144,8 +162,9 @@ private :
std::map<int,ParkingSpaceInfo*>parkMap; std::map<int,ParkingSpaceInfo*>parkMap;
//当前相机监视所以车位区域 //当前相机监视所以车位区域
std::vector<ParkingSpaceInfo*>parkingSpaceInfos; std::vector<ParkingSpaceInfo*>parkingSpaceInfos;
//当前人脸数 //当前人脸数和工作人数
int currentFace; QPoint faceMapWorker;
int mediaHandle; int mediaHandle;
//2秒钟抓一次图 //2秒钟抓一次图
......
...@@ -62,8 +62,8 @@ private: ...@@ -62,8 +62,8 @@ private:
QString videoOut; QString videoOut;
QString videoDownload; QString videoDownload;
QString images; QString images;
double carConfidenceMax; float carConfidenceMax;
double carConfidenceMin; float carConfidenceMin;
Common(); Common();
~Common(); ~Common();
......
...@@ -209,6 +209,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d ...@@ -209,6 +209,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
HInt32 featureNum; HInt32 featureNum;
HF_GetFeatureLength(ctxHandle, &featureNum); HF_GetFeatureLength(ctxHandle, &featureNum);
for(int j=0;j< multipleFaceData.detectedNum; ++j){ for(int j=0;j< multipleFaceData.detectedNum; ++j){
qDebug()<<QString("doesItExistEmployee==>面部索引: %1").arg(j);
std::vector<float> newfeature(featureNum,0.0f); std::vector<float> newfeature(featureNum,0.0f);
ret = HF_FaceFeatureExtractCpy(ctxHandle, imageSteamHandle, multipleFaceData.tokens[j], newfeature.data()); ret = HF_FaceFeatureExtractCpy(ctxHandle, imageSteamHandle, multipleFaceData.tokens[j], newfeature.data());
if(ret != HSUCCEED) { if(ret != HSUCCEED) {
......
...@@ -190,7 +190,6 @@ vides_data::response *HttpService::httpLicensePlateRecognition(vides_data::reque ...@@ -190,7 +190,6 @@ vides_data::response *HttpService::httpLicensePlateRecognition(vides_data::reque
}; };
item.insert("camera_location", cameraObject); item.insert("camera_location", cameraObject);
item.insert("img", QJsonValue::fromVariant(plate.img)); // 替换为真实的图像数据 item.insert("img", QJsonValue::fromVariant(plate.img)); // 替换为真实的图像数据
QJsonObject locationObject { QJsonObject locationObject {
{"bottom_right_corner_x", plate.recognition.bottomRightCornerX}, {"bottom_right_corner_x", plate.recognition.bottomRightCornerX},
...@@ -245,9 +244,11 @@ vides_data::response *HttpService::httpLicensePlateRecognition(vides_data::reque ...@@ -245,9 +244,11 @@ vides_data::response *HttpService::httpLicensePlateRecognition(vides_data::reque
// 将 res 添加到结果列表或进行其他操作 // 将 res 添加到结果列表或进行其他操作
} }
}else{ }else{
qDebug()<<m_httpClient.errorCode(); qDebug()<<"httpLicensePlateRecognition"<<m_httpClient.errorCode();
qDebug()<<"httpLicensePlateRecognition msg"<<m_httpClient.errorString();
resp->code=2; resp->code=2;
resp->msg=OPERATION_FAILED; resp->msg=m_httpClient.errorString();
} }
return resp; return resp;
} }
...@@ -288,12 +289,12 @@ vides_data::response* HttpService::httpFindFaceReconition(QString &serialNumber, ...@@ -288,12 +289,12 @@ vides_data::response* HttpService::httpFindFaceReconition(QString &serialNumber,
return resp; return resp;
} }
vides_data::response *HttpService::httpPostUniforms(QByteArray &img,int &number,QString sn,qint64 time){ vides_data::response *HttpService::httpPostUniforms(QByteArray &img,QString &id,QString sn,qint64 time){
httpUrl.append("/api/v1.0/recongnition/uniform"); httpUrl.append("/api/v1.0/recongnition/uniform");
QJsonObject json; QJsonObject json;
json.insert("img", QJsonValue::fromVariant(img)); json.insert("img", QJsonValue::fromVariant(img));
json.insert("sn",sn); json.insert("sn",sn);
json.insert("number",number); json.insert("id",id);
json.insert("time",QJsonValue::fromVariant(time)); json.insert("time",QJsonValue::fromVariant(time));
QJsonDocument jsonDoc; QJsonDocument jsonDoc;
jsonDoc.setObject(json); jsonDoc.setObject(json);
...@@ -320,12 +321,21 @@ vides_data::response *HttpService::httpPostUniforms(QByteArray &img,int &number, ...@@ -320,12 +321,21 @@ vides_data::response *HttpService::httpPostUniforms(QByteArray &img,int &number,
return resp; return resp;
} }
vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &number,QString sn,qint64 time){ vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &human,int &worker,QString sn,qint64 time){
httpUrl.append("/api/v1.0/recongnition/population"); httpUrl.append("/api/v1.0/recongnition/population");
QJsonObject json; QJsonObject json;
json.insert("img", QJsonValue::fromVariant(img)); json.insert("img", QJsonValue::fromVariant(img));
json.insert("sn",sn); json.insert("sn",sn);
json.insert("number",number);
QJsonObject jsonObject;
jsonObject.insert("human",human);
jsonObject.insert("worker", worker);
// 使用QJsonDocument来转换为字符串
QJsonDocument humanData(jsonObject);
QString jsonString = QString::fromUtf8(humanData.toJson(QJsonDocument::Compact));
json.insert("desc",jsonString);
json.insert("time",QJsonValue::fromVariant(time)); json.insert("time",QJsonValue::fromVariant(time));
QJsonDocument jsonDoc; QJsonDocument jsonDoc;
jsonDoc.setObject(json); jsonDoc.setObject(json);
......
...@@ -34,10 +34,10 @@ public: ...@@ -34,10 +34,10 @@ public:
//人脸识别推送 //人脸识别推送
vides_data::response *httpPostFaceReconition(vides_data::requestFaceReconition & faceReconition); vides_data::response *httpPostFaceReconition(vides_data::requestFaceReconition & faceReconition);
//人数变化推送 //人数变化推送
vides_data::response *httpPostFacePopulation(QByteArray &img,int &number,QString sn,qint64 time); vides_data::response *httpPostFacePopulation(QByteArray &img,int &human,int &worker,QString sn,qint64 time);
//工服推送 //工服推送
vides_data::response *httpPostUniforms(QByteArray &img,int &number,QString sn,qint64 time); vides_data::response *httpPostUniforms(QByteArray &img,QString &id,QString sn,qint64 time);
//客户端组列表 //客户端组列表
vides_data::response *httpFindStream(QString &serialNumber); vides_data::response *httpFindStream(QString &serialNumber);
...@@ -57,6 +57,7 @@ private: ...@@ -57,6 +57,7 @@ private:
QString httpUrl; QString httpUrl;
HttpClient m_httpClient; HttpClient m_httpClient;
QMutex m_httpClientMutex; QMutex m_httpClientMutex;
}; };
......
#include "Common.h"
#include "HumanDetection.h" #include "HumanDetection.h"
HumanDetection* HumanDetection::instance = nullptr; HumanDetection* HumanDetection::instance = nullptr;
HumanDetection::HumanDetection(){ HumanDetection::HumanDetection() : height_reference(250.0f) {
} }
HumanDetection::~HumanDetection(){ HumanDetection::~HumanDetection(){
} }
int HumanDetection::findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector *detector){
// Draws every detection box and a uniform-classification label onto `image`,
// then saves the annotated frame into the configured output directory.
// Fix: the original read boxes->score for the file name even when size == 0
// (reading boxes[0] of an empty result set is undefined behavior); we now
// bail out early when there is nothing to draw.
void HumanDetection::draw_human_on_image(const cv::Mat& image, const TCV_ObjectLocation* boxes, int size) {
    if (boxes == nullptr || size <= 0) {
        // Nothing detected: avoid dereferencing boxes->score below.
        return;
    }

    // Text-rendering settings are identical for every box (loop-invariant).
    const int fontFace = cv::FONT_HERSHEY_SIMPLEX;
    const double fontScale = 1.5;
    const int thickness = 2;
    const cv::Scalar boxColor(0, 255, 0);   // green rectangles
    const cv::Scalar textColor(0, 0, 255);  // red labels

    for (int i = 0; i < size; ++i) {
        const auto& box = boxes[i];
        cv::Point topLeft(box.x1, box.y1);
        cv::Point bottomRight(box.x2, box.y2);
        cv::rectangle(image, topLeft, bottomRight, boxColor, 2);

        // Label text depends on the uniform classification of this detection.
        std::string text;
        switch (box.uniform) {
            case 0: text = "Other"; break;
            case 1: text = "Uniform 1"; break;
            case 2: text = "Uniform 2"; break;
            default: text = "Unknown"; break;
        }

        // Center the label horizontally over the rectangle's top edge.
        int baseline;
        cv::Size textSize = cv::getTextSize(text, fontFace, fontScale, thickness, &baseline);
        cv::Point textOrigin(
            topLeft.x + (bottomRight.x - topLeft.x)/2 - textSize.width/2,
            topLeft.y - baseline - 2);
        // If the label would leave the top of the image, drop it below the box.
        if (textOrigin.y < 0) textOrigin.y = bottomRight.y + textSize.height + 2;
        cv::putText(image, text, textOrigin, fontFace, fontScale, textColor, thickness);
    }

    Common & instace= Common::getInstance();
    // NOTE(review): if getVideoOut() returns a reference, append() mutates the
    // stored output path on every call — confirm it returns by value.
    // Only the FIRST box's score is encoded in the file name (kept from the
    // original; flagging in case all scores were intended).
    QString fileName = instace.getVideoOut().append(instace.getTimeString() +"置信度:"+ QString::number(boxes->score) + ".jpg");
    bool success = cv::imwrite(fileName.toStdString(), image);
    if (success) {
        qDebug() << "车型图片已成功保存至:" << fileName;
    } else {
        qDebug() << "图片保存失败!";
    }
}
void HumanDetection::setHeightReference(float &height_reference){
this->height_reference=height_reference;
}
//int HumanDetection::findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector *detector,
// std::vector<vides_data::ParkingArea> &currentPlate){
// TCV_CameraStream *stream = TCV_CreateCameraStream();
// TCV_CameraStreamSetData(stream, source.data, source.cols, source.rows);
// TCV_CameraStreamSetRotationMode(stream, TCV_CAMERA_ROTATION_0);
// TCV_CameraStreamSetStreamFormat(stream, TCV_STREAM_BGR);
// //0是人 1是车
// // 执行一帧目标检测
// TCV_HumanDetectorProcessFrame(detector, stream);
// int num=0;
// if(res==0x00 || res==0x02){
// num= TCV_HumanDetectorGetNumOfHuman(detector);
// if (num > 0 && res==0x02) {
// // 创建一个接收检测结果的对象数组
// TCV_ObjectLocation result[num];
// // 提取行人检测结果
// TCV_HumanDetectorGetHumanLocation(detector, result, num);
// int num_uniforms = 0;
// //工服
// for (int i = 0; i < num; ++i) {
// if (result[i].uniform == 0 && std::abs(result[i].y2 - result[i].y1)>=height_reference) {
// vides_data::ParkingArea area;
// area.topLeftCornerX=result[i].x1;
// area.topLeftCornerY=result[i].y1;
// area.bottomLeftCornerX=result[i].x1;
// area.bottomLeftCornerY=result[i].y2;
// area.topRightCornerX=result[i].x2;
// area.topRightCornerY=result[i].y1;
// area.bottomRightCornerX=result[i].x2;
// area.bottomRightCornerY=result[i].y2;
// currentPlate.push_back(area);
// ++num_uniforms;
// }
// }
// num=num_uniforms;
// }
// if( num > 0 && res==0x00){
// // 创建一个接收检测结果的对象数组
// TCV_ObjectLocation result[num];
// // 提取行人检测结果
// TCV_HumanDetectorGetHumanLocation(detector, result, num);
// int human_size = 0;
// //工服
// for (int i = 0; i < num; ++i) {
// if (std::abs(result[i].y2 - result[i].y1)>=height_reference) {
// vides_data::ParkingArea area;
// area.topLeftCornerX=result[i].x1;
// area.topLeftCornerY=result[i].y1;
// area.bottomLeftCornerX=result[i].x1;
// area.bottomLeftCornerY=result[i].y2;
// area.topRightCornerX=result[i].x2;
// area.topRightCornerY=result[i].y1;
// area.bottomRightCornerX=result[i].x2;
// area.bottomRightCornerY=result[i].y2;
// currentPlate.push_back(area);
// ++human_size;
// }
// }
// num=human_size;
// }
// qDebug() << (res == 0 ? "findHuManCar 检测到的人数:" : "findHuManCar 未穿工服的人数:") << num;
// }else if (res==0x01) {
// num=TCV_HumanDetectorGetNumOfCar(detector);
// TCV_ObjectLocation resultCar[num];
// TCV_HumanDetectorGetCarLocation(detector,resultCar,num);
// for (int i = 0; i < num; ++i) {
// vides_data::ParkingArea area;
// area.topLeftCornerX=resultCar[i].x1;
// area.topLeftCornerY=resultCar[i].y1;
// area.bottomLeftCornerX=resultCar[i].x1;
// area.bottomLeftCornerY=resultCar[i].y2;
// area.topRightCornerX=resultCar[i].x2;
// area.topRightCornerY=resultCar[i].y1;
// area.bottomRightCornerX=resultCar[i].x2;
// area.bottomRightCornerY=resultCar[i].y2;
// currentPlate.push_back(area);
// qDebug() << "score 检测到的汽车数量匹配度:" << resultCar[i].score;
// }
// qDebug() << "findHuManCar 检测到的汽车数量:" << num;
// }else {
// qDebug() << "参数错误";
// }
// TCV_ReleaseCameraStream(stream);
// return num;
//}
int HumanDetection::findHuManCar(const cv::Mat &source, int res, TCV_HumanDetector *detector, std::vector<vides_data::ParkingArea> &currentPlate) {
TCV_CameraStream *stream = TCV_CreateCameraStream(); TCV_CameraStream *stream = TCV_CreateCameraStream();
TCV_CameraStreamSetData(stream, source.data, source.cols, source.rows); TCV_CameraStreamSetData(stream, source.data, source.cols, source.rows);
TCV_CameraStreamSetRotationMode(stream, TCV_CAMERA_ROTATION_0); TCV_CameraStreamSetRotationMode(stream, TCV_CAMERA_ROTATION_0);
TCV_CameraStreamSetStreamFormat(stream, TCV_STREAM_BGR); TCV_CameraStreamSetStreamFormat(stream, TCV_STREAM_BGR);
//0是人 1是车
// 执行一帧目标检测
TCV_HumanDetectorProcessFrame(detector, stream); TCV_HumanDetectorProcessFrame(detector, stream);
int num=0; int num = 0;
if(res==0x00 || res==0x02){
num= TCV_HumanDetectorGetNumOfHuman(detector); if (res == 0x00 || res == 0x02) {
if (num > 0 && res==0x02) { num = TCV_HumanDetectorGetNumOfHuman(detector);
// 创建一个接收检测结果的对象数组 if (num == 0) return num; // 无行人检测结果,提前返回
TCV_ObjectLocation result[num];
// 提取行人检测结果 std::vector<TCV_ObjectLocation> results(num);
TCV_HumanDetectorGetHumanLocation(detector, result, num); TCV_HumanDetectorGetHumanLocation(detector, results.data(), num);
int num_uniforms = 0;
//工服 int count = 0;
for (int i = 0; i < num; ++i) { for (const auto &person : results) {
if (result[i].uniform == 0) { if ((res == 0x02 && person.uniform == 0) || res == 0x00) {
++num_uniforms; if (std::abs(person.y2 - person.y1) >= height_reference) {
} vides_data::ParkingArea area;
} area.topLeftCornerX=person.x1;
num=num_uniforms; area.topLeftCornerY=person.y1;
area.bottomLeftCornerX=person.x1;
area.bottomLeftCornerY=person.y2;
area.topRightCornerX=person.x2;
area.topRightCornerY=person.y1;
area.bottomRightCornerX=person.x2;
area.bottomRightCornerY=person.y2;
currentPlate.push_back(area);
++count;
}
}
} }
num = count; // 更新num为实际计数
qDebug() << (res == 0 ? "findHuManCar 检测到的人数:" : "findHuManCar 未穿工服的人数:") << num; qDebug() << (res == 0 ? "findHuManCar 检测到的人数:" : "findHuManCar 未穿工服的人数:") << num;
} else if (res == 0x01) {
num = TCV_HumanDetectorGetNumOfCar(detector);
if (num == 0) return num; // 无车辆检测结果,提前返回
}else if (res==0x01) { std::vector<TCV_ObjectLocation> resultCars(num);
num=TCV_HumanDetectorGetNumOfCar(detector); TCV_HumanDetectorGetCarLocation(detector, resultCars.data(), num);
qDebug() << "findHuManCar 检测到的汽车数量:" << num;
}else { for (const auto &car : resultCars) {
vides_data::ParkingArea area;
area.topLeftCornerX=car.x1;
area.topLeftCornerY=car.y1;
area.bottomLeftCornerX=car.x1;
area.bottomLeftCornerY=car.y2;
area.topRightCornerX=car.x2;
area.topRightCornerY=car.y1;
area.bottomRightCornerX=car.x2;
area.bottomRightCornerY=car.y2;
currentPlate.push_back(area);
qDebug() << "score 检测到的汽车数量匹配度:" << car.score;
}
qDebug() << "findHuManCar 检测到的汽车数量:" << num;
} else {
qDebug() << "参数错误"; qDebug() << "参数错误";
} }
TCV_ReleaseCameraStream(stream); TCV_ReleaseCameraStream(stream);
return num; return num;
} }
#ifndef HUMANDETECTION_H #ifndef HUMANDETECTION_H
#define HUMANDETECTION_H #define HUMANDETECTION_H
#include "VidesData.h"
#include "so_human_sdk.h" #include "so_human_sdk.h"
#include <opencv2/opencv.hpp> #include <opencv2/opencv.hpp>
#include <QDebug> #include <QDebug>
...@@ -9,17 +10,21 @@ public: ...@@ -9,17 +10,21 @@ public:
HumanDetection(); HumanDetection();
~HumanDetection(); ~HumanDetection();
void initDetector(); void initDetector();
int findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector *detector); int findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector *detector,std::vector<vides_data::ParkingArea> &currentPlate);
static HumanDetection& getInstance() static HumanDetection& getInstance()
{ {
static HumanDetection instance; static HumanDetection instance;
return instance; return instance;
} }
void setHeightReference(float &height_reference);
void draw_human_on_image(const cv::Mat& image, const TCV_ObjectLocation* boxes, int size);
private: private:
static HumanDetection* instance; static HumanDetection* instance;
//高度基准
float height_reference;
}; };
......
...@@ -46,7 +46,7 @@ void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const ...@@ -46,7 +46,7 @@ void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const
char* m_path=by_mpath.data(); char* m_path=by_mpath.data();
configuration.models_path = m_path; configuration.models_path = m_path;
configuration.max_num = 5; configuration.max_num = 5;
configuration.det_level = DETECT_LEVEL_LOW; configuration.det_level = DETECT_LEVEL_HIGH;
configuration.use_half = false; configuration.use_half = false;
configuration.nms_threshold = 0.5f; configuration.nms_threshold = 0.5f;
configuration.rec_confidence_threshold = 0.8f; configuration.rec_confidence_threshold = 0.8f;
...@@ -89,7 +89,29 @@ void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const ...@@ -89,7 +89,29 @@ void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const
HLPR_ReleaseContext(ctx1); HLPR_ReleaseContext(ctx1);
} }
// Normalizes characters that OCR commonly confuses on license plates:
// 'I'/'i' become '1' and 'O'/'o' become '0', in place.
// Fix: the original compiled two QRegularExpression objects on EVERY call for
// what are plain single-character substitutions; QString::replace(QChar,QChar)
// is behaviorally identical here and avoids the regex machinery entirely.
void LicensePlateRecognition::replaceWith1And0(QString &code) {
    code.replace(QChar('I'), QChar('1'));
    code.replace(QChar('i'), QChar('1'));
    code.replace(QChar('O'), QChar('0'));
    code.replace(QChar('o'), QChar('0'));
}
// Selects the candidate with the highest text_confidence from plate.plates
// and writes it into `max`. When the candidate list is empty, `max` is left
// exactly as the caller passed it.
void LicensePlateRecognition::filterLicensePlateConfidenceMax(vides_data::requestLicensePlate &plate, vides_data::LicensePlate &max) {
    std::list<vides_data::LicensePlate> &candidates = plate.plates; // reference — no list copy
    if (candidates.empty()) {
        return; // nothing to pick; caller's max stays untouched
    }
    // Seed with the first candidate, then keep whichever plate beats the
    // current best confidence (strictly greater, so earlier entries win ties).
    max = candidates.front();
    for (const vides_data::LicensePlate &candidate : candidates) {
        if (candidate.text_confidence > max.text_confidence) {
            max = candidate;
        }
    }
}
void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate, void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime,P_HLPR_Context ctx) { qint64 currentTime,P_HLPR_Context ctx) {
...@@ -130,7 +152,10 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString ...@@ -130,7 +152,10 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
vides_data::LicensePlate newPlate; vides_data::LicensePlate newPlate;
newPlate.time=currentTime; newPlate.time=currentTime;
newPlate.new_color=QString::fromStdString(type); newPlate.new_color=QString::fromStdString(type);
newPlate.new_plate=QString::fromUtf8(results.plates[i].code); QString car_nuber=QString::fromUtf8(results.plates[i].code);
replaceWith1And0(car_nuber);
qDebug()<<"I O (i o)大小写替换为 1 0结果:==>"<<car_nuber;
newPlate.new_plate=car_nuber;
newPlate.text_confidence=results.plates[i].text_confidence; newPlate.text_confidence=results.plates[i].text_confidence;
vides_data::ParkingArea area; vides_data::ParkingArea area;
area.topLeftCornerX=results.plates[i].x1; area.topLeftCornerX=results.plates[i].x1;
......
...@@ -9,6 +9,7 @@ ...@@ -9,6 +9,7 @@
#include <QFile> #include <QFile>
#include <QImage> #include <QImage>
#include <mutex> #include <mutex>
#include <QRegularExpression>
const std::vector<std::string> types = const std::vector<std::string> types =
{"蓝牌", "黄牌单层", "白牌单层", "绿牌新能源", "黑牌港澳", {"蓝牌", "黄牌单层", "白牌单层", "绿牌新能源", "黑牌港澳",
...@@ -24,10 +25,11 @@ public: ...@@ -24,10 +25,11 @@ public:
void licensePlateNumber(const cv::Mat &source,QString & lpNumber, vides_data::requestLicensePlate &plate, void licensePlateNumber(const cv::Mat &source,QString & lpNumber, vides_data::requestLicensePlate &plate,
qint64 currentTime,P_HLPR_Context ctx); qint64 currentTime,P_HLPR_Context ctx);
void filterLicensePlateConfidenceMax(vides_data::requestLicensePlate &plate,vides_data::LicensePlate &max);
void oldLicensePlateNumber(const cv::Mat &source,const QString &modelPaths,QString & lpNumber); void oldLicensePlateNumber(const cv::Mat &source,const QString &modelPaths,QString & lpNumber);
// void initHlprContext(const QString &modelPaths,const QString &carCascade,float carConfidence); // void initHlprContext(const QString &modelPaths,const QString &carCascade,float carConfidence);
void replaceWith1And0( QString &code);
private: private:
static LicensePlateRecognition* instance; static LicensePlateRecognition* instance;
......
...@@ -58,7 +58,7 @@ static int sdkInitCallback(XSDK_HANDLE hObject, int nMsgId, int nParam1, ...@@ -58,7 +58,7 @@ static int sdkInitCallback(XSDK_HANDLE hObject, int nMsgId, int nParam1,
QString qString(szString); QString qString(szString);
CameraHandle* cameraHandle= mediaFaceImage->getCurrentDevice().at(hObject); CameraHandle* cameraHandle= mediaFaceImage->getCurrentDevice().at(hObject);
QThreadPool* threadPool = QThreadPool::globalInstance(); QThreadPool* threadPool = QThreadPool::globalInstance();
threadPool->setMaxThreadCount(12);
auto taskCallBack=std::bind(&CameraHandle::callbackFunction, cameraHandle, hObject, qString); auto taskCallBack=std::bind(&CameraHandle::callbackFunction, cameraHandle, hObject, qString);
auto taskRunnable = new TaskRunnable(taskCallBack, hObject,cameraHandle->getChannel(), RunFunction::SdkCallbackFunction); auto taskRunnable = new TaskRunnable(taskCallBack, hObject,cameraHandle->getChannel(), RunFunction::SdkCallbackFunction);
// task->setAutoDelete(false); // 确保task不会在执行后被自动删除 // task->setAutoDelete(false); // 确保task不会在执行后被自动删除
......
...@@ -9,12 +9,15 @@ public: ...@@ -9,12 +9,15 @@ public:
ParkingSpaceInfo(RecognizedInfo & currentPlate); ParkingSpaceInfo(RecognizedInfo & currentPlate);
ParkingSpaceInfo(); ParkingSpaceInfo();
~ParkingSpaceInfo(); ~ParkingSpaceInfo();
RecognizedInfo& getCurrentPlate(); RecognizedInfo& getCurrentPlate();
void setCurrentPlate(RecognizedInfo & current); void setCurrentPlate(RecognizedInfo & current);
void addQueue(RecognizedInfo &info); void addQueue(RecognizedInfo &info);
void removeQueue(); void removeQueue();
void removeNoQueue(); void removeNoQueue();
QQueue<RecognizedInfo> &getQueue(); QQueue<RecognizedInfo> &getQueue();
void setArea(vides_data::ParkingArea &a); void setArea(vides_data::ParkingArea &a);
vides_data::ParkingArea &getArea(); vides_data::ParkingArea &getArea();
......
...@@ -11,6 +11,10 @@ ...@@ -11,6 +11,10 @@
#include <QTextStream> #include <QTextStream>
#include <QByteArray> #include <QByteArray>
#include <QNetworkInterface> #include <QNetworkInterface>
#include <iostream>
#include <cstdio>
#include <string>
#include <sstream>
#include <list> #include <list>
namespace vides_data{ namespace vides_data{
constexpr const char *HEADER_TYPE_KAY="Content-Type"; constexpr const char *HEADER_TYPE_KAY="Content-Type";
...@@ -147,7 +151,7 @@ struct LicensePlate ...@@ -147,7 +151,7 @@ struct LicensePlate
QString new_color; QString new_color;
QByteArray img; QByteArray img;
qint64 time; qint64 time;
ParkingArea recognition; ParkingArea recognition;//识别区域
float text_confidence; float text_confidence;
LicensePlate() {} LicensePlate() {}
}; };
...@@ -258,6 +262,7 @@ inline QString getDefaultGateway() { ...@@ -258,6 +262,7 @@ inline QString getDefaultGateway() {
#endif #endif
return gateway; return gateway;
} }
inline bool pingAddress(const QString &address) { inline bool pingAddress(const QString &address) {
QProcess process; QProcess process;
QString program = "ping"; QString program = "ping";
......
QT += core gui network multimedia sql concurrent QT += core gui network multimedia sql concurrent
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
CONFIG += c++11 CONFIG += c++11
...@@ -11,46 +12,46 @@ TEMPLATE = app ...@@ -11,46 +12,46 @@ TEMPLATE = app
# depend on your compiler). Please consult the documentation of the # depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it. # deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS DEFINES += QT_DEPRECATED_WARNINGS
DEFINES += APP_VERSION=\\\"1.0.1\\\" DEFINES += APP_VERSION=\\\"1.0.2\\\"
QMAKE_LIBDIR += /usr/local/lib #QMAKE_LIBDIR += /usr/local/lib
INCLUDEPATH+=/usr/local/include/opencv4 #INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface #INCLUDEPATH+=/usr/local/include/hyperface
INCLUDEPATH+=/usr/local/include/hyper #INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK #INCLUDEPATH+=/usr/local/include/XNetSDK
INCLUDEPATH+=/usr/local/include/human #INCLUDEPATH+=/usr/local/include/human
INCLUDEPATH+=/usr/local/include/CImg #INCLUDEPATH+=/usr/local/include/CImg
#unix:contains(QMAKE_HOST.arch, x86_64) { unix:contains(QMAKE_HOST.arch, x86_64) {
# QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
#} }
#unix:contains(QMAKE_HOST.arch, arm) { unix:contains(QMAKE_HOST.arch, arm) {
# QMAKE_LIBDIR += /usr/local/lib QMAKE_LIBDIR += /usr/local/lib
#} }
## 根据编译器类型选择库路径和头文件路径 # 根据编译器类型选择库路径和头文件路径
#unix: { unix: {
# # x86 架构 # x86 架构
# contains(QMAKE_HOST.arch, x86_64) { contains(QMAKE_HOST.arch, x86_64) {
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4 INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/CImg INCLUDEPATH+=/home/mark/Public/x86_opencv/include/CImg
# } }
# # ARM 架构 # ARM 架构
# contains(QMAKE_HOST.arch, arm) { contains(QMAKE_HOST.arch, arm) {
# INCLUDEPATH+=/usr/local/include/opencv4 INCLUDEPATH+=/usr/local/include/opencv4
# INCLUDEPATH+=/usr/local/include/hyperface INCLUDEPATH+=/usr/local/include/hyperface
# INCLUDEPATH+=/usr/local/include/hyper INCLUDEPATH+=/usr/local/include/hyper
# INCLUDEPATH+=/usr/local/include/XNetSDK INCLUDEPATH+=/usr/local/include/XNetSDK
# INCLUDEPATH+=/usr/local/include/human INCLUDEPATH+=/usr/local/include/human
# } }
#} }
# You can also make your code fail to compile if it uses deprecated APIs. # You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line. # In order to do so, uncomment the following line.
......
...@@ -63,7 +63,17 @@ MainWindow::MainWindow() ...@@ -63,7 +63,17 @@ MainWindow::MainWindow()
connect(dePermissionSynTimer, &QTimer::timeout, this, [this, httpurl](){ connect(dePermissionSynTimer, &QTimer::timeout, this, [this, httpurl](){
this->startCamera(httpurl); this->startCamera(httpurl);
},Qt::QueuedConnection); },Qt::QueuedConnection);
dePermissionSynTimer->start(dePermissionTimer);
this->startCamera(httpurl);
// 设置定时器间隔
dePermissionSynTimer->setInterval(dePermissionTimer);
// 启动定时器
dePermissionSynTimer->start();
//dePermissionSynTimer->start(dePermissionTimer);
//vides_data::scanWiFiNetworks();
connect(&server, &QTcpServer::newConnection, this, &MainWindow::handleMatNewConnection); connect(&server, &QTcpServer::newConnection, this, &MainWindow::handleMatNewConnection);
...@@ -76,6 +86,7 @@ MainWindow::MainWindow() ...@@ -76,6 +86,7 @@ MainWindow::MainWindow()
} }
} }
CameraHandle* MainWindow::findHandle(QString sn){ CameraHandle* MainWindow::findHandle(QString sn){
for (auto it = faceDetectionParkingPushs.begin(); it != faceDetectionParkingPushs.end(); ++it) { for (auto it = faceDetectionParkingPushs.begin(); it != faceDetectionParkingPushs.end(); ++it) {
QString currentSn = it->second->getSSn(); QString currentSn = it->second->getSSn();
...@@ -751,8 +762,11 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std:: ...@@ -751,8 +762,11 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance(); MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
float carConfidence=qSetting->value("devices/carConfidence").toFloat(); float carConfidence=qSetting->value("devices/carConfidence").toFloat();
int image_save=qSetting->value("devices/image_save").toInt(); int image_save=qSetting->value("devices/image_save").toInt();
float heightReference=qSetting->value("devices/height_reference").toFloat();
CameraHandle * cameraHandle =new CameraHandle(parameter.sDevId,parameter.httpUrl,parameter.sSn,parameter.channel,modelPaths,carConfidence,image_save); float carShapeConfidence=qSetting->value("devices/carShapeConfidence").toFloat();
CameraHandle * cameraHandle =new CameraHandle(parameter.sDevId,parameter.httpUrl,parameter.sSn,parameter.channel,modelPaths,carConfidence,carShapeConfidence,image_save);
int sdk_handle=cameraHandle->sdkDevLoginSyn(parameter.sDevId,parameter.nDevPort,parameter.sUserName,parameter.sPassword,10000); int sdk_handle=cameraHandle->sdkDevLoginSyn(parameter.sDevId,parameter.nDevPort,parameter.sUserName,parameter.sPassword,10000);
qDebug()<<"句柄为2:"<<sdk_handle; qDebug()<<"句柄为2:"<<sdk_handle;
...@@ -763,11 +777,16 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std:: ...@@ -763,11 +777,16 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
initDevConfigSyn(cameraHandle); initDevConfigSyn(cameraHandle);
mediaFaceImage->setMap(sdk_handle,cameraHandle); mediaFaceImage->setMap(sdk_handle,cameraHandle);
cameraHandle->sdkDevSetAlarmListener(sdk_handle,1); cameraHandle->sdkDevSetAlarmListener(sdk_handle,0);
int synTime=qSetting->value("timer/dev_snap_syn_timer").toInt(); int synTime=qSetting->value("timer/dev_snap_syn_timer").toInt();
uint64 face_frequency=qSetting->value("devices/face_frequency").toULongLong(); uint64 face_frequency=qSetting->value("devices/face_frequency").toULongLong();
cameraHandle->initSdkRealTimeDevSnapSyn(sdk_handle,synTime,face_frequency); cameraHandle->initSdkRealTimeDevSnapSyn(sdk_handle,synTime,face_frequency);
cameraHandle->initAlgorithmParameter(heightReference);
QString pwd="admin2024";
QString sid="MERCURY_8C4F";
cameraHandle->sdkWifi(pwd,sid);
vides_data::requestCameraInfo camera_info; vides_data::requestCameraInfo camera_info;
camera_info.sSn=parameter.sSn; camera_info.sSn=parameter.sSn;
camera_info.ip_addr=parameter.sDevId; camera_info.ip_addr=parameter.sDevId;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment