Commit 4b67a9ac by liusq

Add length limits for green and blue license plates, and automatic semaphore release

parent f5c6f1e3
#include "CameraHandle.h" #include "CameraHandle.h"
#include "TaskRunnable.h" #include "TaskRunnable.h"
#include "HumanDetection.h" #include "HumanDetection.h"
#include "ScopeSemaphoreExit.h"
#include <QRegularExpression> #include <QRegularExpression>
CameraHandle::CameraHandle(){ CameraHandle::CameraHandle(){
} }
CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &channel,const QString &modelPaths, float carConfidence,int imageSave) CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &channel,const QString &modelPaths, float carConfidence,int imageSave)
: hDevice(-1), : hDevice(-1),
...@@ -14,14 +15,13 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch ...@@ -14,14 +15,13 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch
channel(channel), channel(channel),
httpUrl(httpUrl), httpUrl(httpUrl),
dev_snap_syn_timer(new QTimer()), dev_snap_syn_timer(new QTimer()),
release_timer(new QTimer()),
image_save(imageSave), image_save(imageSave),
semaphore(1) { semaphore(1) {
connect(this, SIGNAL(afterDownloadFile(int,int,QString)), this, SLOT(pushRecordToCloud(int,int,QString)),Qt::QueuedConnection); connect(this, SIGNAL(afterDownloadFile(int,int,QString)), this, SLOT(pushRecordToCloud(int,int,QString)),Qt::QueuedConnection);
detector = TCV_CreateHumanDetector(); detector = TCV_CreateHumanDetector();
// 设置检测得分阈值 默认0.5 // 设置检测得分阈值 默认0.5
TCV_HumanDetectorSetScoreThreshold(detector, 0.5f); TCV_HumanDetectorSetScoreThreshold(detector, 0.8f);
HLPR_ContextConfiguration configuration = {0}; HLPR_ContextConfiguration configuration = {0};
QByteArray && by_mpath=modelPaths.toUtf8(); QByteArray && by_mpath=modelPaths.toUtf8();
char* m_path=by_mpath.data(); char* m_path=by_mpath.data();
...@@ -34,15 +34,13 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch ...@@ -34,15 +34,13 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch
configuration.box_conf_threshold = 0.30f; configuration.box_conf_threshold = 0.30f;
configuration.threads = 1; configuration.threads = 1;
ctx = HLPR_CreateContext(&configuration); ctx = HLPR_CreateContext(&configuration);
connect(release_timer, &QTimer::timeout, this, &CameraHandle::releaseSemaphore);
} }
CameraHandle::~CameraHandle() {
    Common & instace= Common::getInstance();
    dev_snap_syn_timer->stop();
    instace.deleteObj(dev_snap_syn_timer);
-   instace.deleteObj(release_timer);
    instace.deleteObj(loginParam);
    instace.deleteObj(sxMediaFaceImageReq);
    if(detector!=nullptr){
@@ -54,22 +52,22 @@ CameraHandle::~CameraHandle() {
    }
    parkMap.clear();
    QThreadPool::globalInstance()->waitForDone();
}
int CameraHandle::sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout) {
    QByteArray byteArray = sDevId.toUtf8();
    char* cDevid=byteArray.data();
    strcpy(loginParam->sDevId, cDevid);
    loginParam->nDevPort=nDevPort;
    QByteArray byteName = sUserName.toUtf8();
    char* cName=byteName.data();
    strcpy(loginParam->sUserName, cName);
    if(sPassword.length()>0){
        QByteArray bytePassword = sPassword.toUtf8();
        strcpy(loginParam->sPassword, bytePassword.constData());
@@ -98,17 +96,17 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
        // Frame info
        SXSDK_FRAME_INFO* pFrame = (SXSDK_FRAME_INFO*)pDataInfo;
        MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
        if (pFrame->nType == XSDK_FRAME_TYPE_VIDEO && pFrame->nSubType == XSDK_FRAME_TYPE_VIDEO_I_FRAME)
        {
            //printf("[%d]::OnFrame[Len:%d][Type:%d/%d][%04d-%02d-%02d %02d:%02d:%02d-%03d]\r\n", hMedia, pFrame->nLength, pFrame->nType, pFrame->nSubType, pFrame->nYear, pFrame->nMonth, pFrame->nDay, pFrame->nHour, pFrame->nMinute, pFrame->nSecond, (int)(pFrame->nTimeStamp % 1000));
        }
        if (cameraHandle->getMediaHandle() > 0 && cameraHandle->getMediaHandle()== hMedia)
        {
            if (pFrame->nSubType == XSDK_ENCODE_VIDEO_JPEG)
            {
                mediaFaceImage->AbFile(cFname, pFrame->pContent, pFrame->nFrameLength);
            }
            else
@@ -119,7 +117,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
                mediaFaceImage->AbFile(cFname, pFrame->pHeader, pFrame->nLength);
            }
        }
    }
    else if (ESXSDK_MEDIA_START_REAL_PLAY == nDataType
             || ESXSDK_MEDIA_START_FACE_IMAGE == nDataType
@@ -130,7 +128,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
             )
    {
        int& nResult = nDataLen;
    }
    else if (EXCMD_MONITOR_DATA == nDataType
             || EXCMD_DOWNLOAD_DATA == nDataType
@@ -144,16 +142,16 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
    {
        printf("[%d]::OnMedia[%d][DataLen:%d]\r\n", hMedia, nDataType, nDataLen);
    }*/
    if (EXCMD_DOWNLOAD_DATA == nDataType)
    {
        qDebug()<<"EXCMD_DOWNLOAD_DATA"<<nDataType;
        /*if (g_hRecordDownload > 0 && g_hRecordDownload == hMedia)
        {
            std::string::size_type pos = g_test.sDownloadFileName.rfind('.');
            std::string strSuffix = g_test.sDownloadFileName.substr(pos + 1, pos + 2);
            if (STRCMP(strSuffix.c_str(), "jpg") == 0)
            {
                const char* pWriteData = (const char*)pData;
@@ -161,10 +159,10 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
                {
                    pWriteData += 16;
                    nDataLen -= 16;
                    ++stInit;
                }
                XFILE::ABFile(g_test.sDownloadFileName.c_str(), pWriteData, nDataLen);
            }
            else
@@ -182,7 +180,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
    {
        //int& nState = nDataLen;
        //printf("[%d]::OnMediaStateChannged[nState:%d]\r\n", hMedia, nState);
        if (nDataLen == EState_Media_DataEnd)
        {
            if (cameraHandle->getMediaHandle() > 0)
@@ -192,11 +190,11 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
            QString mp4FileName =dName;
            mp4FileName.replace(QRegularExpression("\\.[^\\.]*$"), ".mp4");
            data["downloadFileName"]=mp4FileName;
            QProcess ffmpegProcess;
            QStringList arguments;
            arguments << "-i" << dName << "-c:v" << "copy" << mp4FileName;
            ffmpegProcess.start("ffmpeg", arguments);
            // Wait for the ffmpeg process to finish
            if (ffmpegProcess.waitForFinished(20000)) {
@@ -205,7 +203,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
            } else {
                qDebug() << "Error: ffmpeg process did not finish.";
            }
            // Tear down the QProcess object
            ffmpegProcess.close();
            QFileInfo fileInfo(mp4FileName);
@@ -221,7 +219,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
            }
        }
    }
}
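The QProcess call above remuxes the downloaded .h264 stream into an .mp4 container without re-encoding; with placeholder file names it is equivalent to running:

    ffmpeg -i download.h264 -c:v copy download.mp4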
int CameraHandle::sdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener) {
    return XSDK_DevSetAlarmListener(hDevice,bListener);
@@ -240,23 +238,26 @@ void CameraHandle::getCurrentFrame(std::vector<uchar> &buffer){
}
-void CameraHandle::initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer) {
+void CameraHandle::initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 face_frequency) {
    connect(dev_snap_syn_timer, &QTimer::timeout, this, [this,hDevice]() {
        this->sdkRealTimeDevSnapSyn(hDevice);
    },Qt::QueuedConnection);
+   this->face_frequency=face_frequency;
    dev_snap_syn_timer->start(syn_timer);
}
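For reference, a minimal call sketch for the updated signature; the helper function, the 2000 ms snapshot interval, and the face_frequency of 10 are hypothetical values, not taken from this commit:

    // Hypothetical wiring: snapshot every 2 s, push face counts on every 10th frame.
    void startRealtimeSnap(CameraHandle &handle) {
        handle.initSdkRealTimeDevSnapSyn(handle.getHdevice(), /*syn_timer=*/2000, /*face_frequency=*/10);
    }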
void CameraHandle::sdkRealTimeDevSnapSyn(int hDevice) {
    QThreadPool* threadPool = QThreadPool::globalInstance();
    threadPool->setMaxThreadCount(8);
    auto taskSyn = std::bind(&CameraHandle::sdkDevSnapSyn, this, hDevice, this->channel);
    auto taskRunnable = new TaskRunnable(taskSyn, hDevice, this->channel, RunFunction::SdkDevSnapSyn);
    threadPool->start(taskRunnable);
}
QString CameraHandle::getSSn(){
    return sSn;
@@ -294,12 +295,12 @@ void CameraHandle::sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
    newSn.append(szTime);
    //downloadFileName=QString("%1/%2.h264").arg(videoPath, szTime);
    currentData["downloadFileName"] =QString("%1%2.h264").arg(videoPath, newSn);
    SXMediaRecordReq param = { 0 };
    QByteArray bStart =startTimer.toUtf8();
    const char* cStart=bStart.data();
    QByteArray bEnd=endTime.toUtf8();
    const char* cEnd=bEnd.data();
    strcpy(param.sBeginTime, cStart); // requested start time (required)
@@ -311,37 +312,25 @@ void CameraHandle::sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
    SMsgReceiver sms(nullptr,XNetSDK_MediaCallBack,this);
    param.result=sms;
    qDebug() << "XSDK_MediaRecordDownload hDevice:"<<this->hDevice;
    this->mediaHandle = XSDK_MediaRecordDownload(this->hDevice, &param, 0, 4000);
    if ( this->mediaHandle < 0)
    {
        qInfo() << "XSDK_MediaRecordDownload Failed:"<< this->mediaHandle ;
        return ;
    }
}
-bool CameraHandle::acquireAndReleaseWithTimeout(bool flag) {
-    if (!semaphore.tryAcquire()) {
-        qInfo() << (flag ? "callbackFunction:正在执行线程 " : "sdkDevSnapSyn:正在执行线程");
-        return true;
-    }
-    QMetaObject::invokeMethod(release_timer, "start",
-                              Qt::QueuedConnection,
-                              Q_ARG(int, timeoutMs));
-    return false;
-}
-void CameraHandle::releaseSemaphore() {
-    if (release_timer->isActive()) {
-        QMetaObject::invokeMethod(release_timer, "stop", Qt::QueuedConnection);
-    }
-    semaphore.release();
-}
int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
+   if (!semaphore.tryAcquire()) {
+       qInfo() << "sdkDevSnapSyn:正在执行线程";
+       return -1;
+   }
+   ScopeSemaphoreExit guard([this]() {
+       semaphore.release(); // release the semaphore on scope exit
+   });
    QByteArray && byJson = szString.toLocal8Bit();
    const char * cJson= byJson.data();
    XSDK_CFG::AlarmInfo alarmInfo;
@@ -354,47 +343,50 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
                 << "\r\nChannel:" << alarmInfo.Channel.Value()
                 << "\r\nStartTime:" << alarmInfo.StartTime.Value()
                 << "\r\nStatus:" << alarmInfo.Status.Value();
    }
    else
    {
        qDebug() << "OnDevAlarmCallback[Dev:" << hObject << "][Event:" << szString << "]";
    }
-   if(acquireAndReleaseWithTimeout(false)){
-       return -1;
-   }
    cv::Mat image;
    MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
    qint64 currentTime= QDateTime::currentSecsSinceEpoch();
-   mediaFaceImage->FaceImageCallBack(hObject,sxMediaFaceImageReq->nChannel,image);
+   mediaFaceImage->FaceImageCallBack(hObject,this->channel,image);
    if (image.empty())
    {
        qInfo() << "Failed to read the image";
        return -1;
    }
-   this->updateImage(image,currentTime);
-   QMetaObject::invokeMethod(release_timer, "stop", Qt::QueuedConnection);
-   semaphore.release();
+   if (image.rows <= 0 || image.cols <= 0 || image.channels() <= 0) {
+       qInfo() << "图像尺寸或通道数不正确,需排查原因";
+       return -1;
+   }
+   updateImage(image, currentTime);
}
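ScopeSemaphoreExit itself is not part of this diff; presumably it is a small RAII guard that runs the supplied callable when it goes out of scope, so the semaphore is released on every return path of callbackFunction and sdkDevSnapSyn. A minimal sketch under that assumption:

    #include <functional>

    // Assumed shape of ScopeSemaphoreExit: invoke the stored callable on destruction,
    // i.e. on normal return and on early return alike.
    class ScopeSemaphoreExit {
    public:
        explicit ScopeSemaphoreExit(std::function<void()> onExit)
            : onExit_(std::move(onExit)) {}
        ~ScopeSemaphoreExit() { if (onExit_) onExit_(); }
        ScopeSemaphoreExit(const ScopeSemaphoreExit &) = delete;
        ScopeSemaphoreExit &operator=(const ScopeSemaphoreExit &) = delete;
    private:
        std::function<void()> onExit_;
    };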
void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
    if(hDevice<=0){
        qInfo() << "相机断线";
        return;
    }
-   if(acquireAndReleaseWithTimeout(true)){
+   if (!semaphore.tryAcquire()) {
+       qInfo() << "callbackFunction:正在执行线程";
        return ;
    }
+   ScopeSemaphoreExit guard([this]() {
+       semaphore.release(); // release the semaphore on scope exit
+   });
    cv::Mat image;
    MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
    qint64 currentTime= QDateTime::currentSecsSinceEpoch();
    int ret=mediaFaceImage->FaceImageCallBack(hDevice,nChannel, image);
    qDebug() << "SdkDevSnapSyn HTTP POST request to: " << sSn;
    if (ret < 0) {
        offlineCount++; // increment the failure counter
        qDebug() << "offlineCount: " << loginParam->sDevId<<offlineCount;
        if (offlineCount >= 3) { // three consecutive failures mean the device is offline
            qInfo() << "设备离线";
            QString ip=QString::fromUtf8(loginParam->sDevId);
@@ -413,10 +405,11 @@ void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
        qInfo() << "Failed to read the image";
        return ;
    }
-   this->updateImage(image,currentTime);
-   QMetaObject::invokeMethod(release_timer, "stop", Qt::QueuedConnection);
-   semaphore.release();
+   if (image.rows <= 0 || image.cols <= 0 || image.channels() <= 0) {
+       qInfo() << "图像尺寸或通道数不正确,需排查原因";
+       return ;
+   }
+   updateImage(image, currentTime);
}
void CameraHandle::setTimeoutMs(int timeoutMs){
@@ -429,7 +422,6 @@ void CameraHandle::matToBase64(const cv::Mat &image, QByteArray &base64Data) {
    cv::imencode(".jpg", image, buffer, params);
    base64Data = QByteArray(reinterpret_cast<const char*>(buffer.data()), buffer.size()).toBase64();
}
void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat &frame, RecognizedInfo& newInfo,
                                              int &result,std::map<int,RecognizedInfo>&exitAndMoMap){
    if (newInfo.getLicensePlate() != park->getCurrentPlate().getLicensePlate()) {
@@ -445,26 +437,31 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
        if(park->getCurrentPlate().getLicensePlate().length()<=0){
            // entry
            park->setCurrentPlate(newInfo);
-           result=CAR_INFORMATION::Mobilization;
+           result=Mobilization;
        }else {
            // incoming plate is empty: candidate exit
            if(newInfo.getLicensePlate().length()<=0){
                HumanDetection &humanDetection=HumanDetection::getInstance();
                int car_size = humanDetection.findHuManCar(frame,1,detector);
+               qDebug()<<sSn<<":"<<"当前车形数量:"<<car_size;
                if(car_size<=0){
                    // exit
                    park->setCurrentPlate(newInfo);
-                   result=CAR_INFORMATION::Exit;
+                   result=Exit;
                }else{
                    park-> removeNoQueue();
                    qDebug()<<sSn<<":"<<"no出场:"<<car_size;
                }
            }else{
+               qDebug()<<sSn<<":"<<"出场:"<<2;
+               qDebug()<<sSn<<":"<<"老车出场:"<<park->getCurrentPlate().getLicensePlate();
+               qDebug()<<sSn<<":"<<"老车出场:"<<park->getCurrentPlate().getLicensePlate();
                // slot currently occupied: new car enters, old car exits
-               exitAndMoMap[CAR_INFORMATION::Exit]=park->getCurrentPlate();
-               exitAndMoMap[CAR_INFORMATION::Mobilization]=newInfo;
+               exitAndMoMap[Exit]=park->getCurrentPlate();
+               exitAndMoMap[Mobilization]=newInfo;
                park->setCurrentPlate(newInfo);
-               result=CAR_INFORMATION::ExitAndMobilization;
+               result=ExitAndMobilization;
            }
        }
    }
@@ -473,36 +470,47 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
    Common & instace= Common::getInstance();
    qDebug()<<"=============================>";
+   static int i=0;
+   printf("updateImage%d次\n", ++i);
+   faceCount.fetch_add(1, std::memory_order_relaxed);
+   qDebug()<<"faceCount==>"<<faceCount.load(std::memory_order_relaxed);
+   int width = frame.cols;   // image width
+   int height = frame.rows;  // image height
+   qDebug()<<"frame 宽度:"<<width<<"frame 高度:"<<height;
    FaceReconition &faceRecognition = FaceReconition::getInstance();
    HumanDetection &humanDetection=HumanDetection::getInstance();
    LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance();
-   static int i=0;
-   printf("updateImage retryCount: %d \n", ++i);
+   static int ii=0;
+   printf("updateImage retryCount: %d \n", ++ii);
    //faceRecognition.search(frame,imageHandleList,names);
    QByteArray imgs;
    int faSize=humanDetection.findHuManCar(frame,0,detector);
    this->matToBase64(frame, imgs);
    HttpService httpService(httpUrl);
    if(currentFace!=faSize){
+       if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){
            vides_data::response* resp=httpService.httpPostFacePopulation(imgs,faSize,sSn,currentTime);
            if (resp->code!= 0) {
                qInfo()<<"人数变化推送信息推送失败";
            }
            instace.deleteObj(resp);
            currentFace=faSize;
+       }
    }
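The atomic counter turns the population POST into a sampled push: httpPostFacePopulation only fires when the frame counter is a multiple of face_frequency. A hypothetical helper mirroring that inline check (the value 10 in the comment is illustrative, not from this commit):

    #include <cstdint>

    // Returns true on every Nth call, e.g. frequency == 10 passes frames 10, 20, 30, ...
    bool shouldPushPopulation(uint64_t frameCount, uint64_t frequency) {
        return frequency != 0 && frameCount % frequency == 0;
    }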
    if(faSize>0){
+       qDebug() << "faceRecognition.doesItExistEmployee Current thread ID: " << QThread::currentThreadId();
        std::list<vides_data::faceRecognitionResult>faces;
        faceRecognition.doesItExistEmployee(frame,faces);
-       if (!faces.empty()) {
-           for (const auto& face : faces) {
+       if (faces.size()>0) {
+           for(auto face:faces){
                vides_data::requestFaceReconition faceReconition;
                faceReconition.id = face.id;
                faceReconition.img = imgs;
@@ -514,14 +522,14 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
                faceReconition.area.bottom_right_corner_y= face.y + face.height;
                faceReconition.area.bottom_left_corner_x = face.x;
                faceReconition.area.bottom_left_corner_y = face.y + face.height;
                faceReconition.area.top_right_corner_x = face.x + face.width;
                faceReconition.area.top_right_corner_y = face.y;
                httpService.setHttpUrl(httpUrl);
                vides_data::response* resp = httpService.httpPostFaceReconition(faceReconition);
                if (resp->code!= 0) {
-                   qInfo() << "识别人脸信息推送失败:" << face.id;
+                   qInfo()<<"识别人脸信息推送失败"<<face.id;
                }
                instace.deleteObj(resp);
            }
@@ -530,20 +538,20 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
    QString lpNumber;
    vides_data::requestLicensePlate plate;
    plate.sn=sSn;
    // return ;
    if(image_save==1){
        QString fileName= instace.getVideoOut().append(instace.getTimeString()+".jpg");
        bool success = cv::imwrite(fileName.toStdString(), frame);
        if (success) {
            qDebug() << "图片已成功保存至:" << fileName;
        } else {
            qDebug() << "图片保存失败!";
        }
    }
    licensePlateRecogn.licensePlateNumber(frame, lpNumber,plate,currentTime,ctx);
    std::map<int,RecognizedInfo>exitMoMap;
    vides_data::requestLicensePlate newPlate;
@@ -589,9 +597,38 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
            value->removeQueue();
        }
        vides_data::LicensePlate recognition= indexToLicensePlate.at(key);
-       RecognizedInfo recognizedInfo(recognition.new_plate,recognition.time,recognition.new_color);
-       value->addQueue(recognizedInfo);
-       this->checkAndUpdateCurrentPlate(value,frame,recognizedInfo,res,exitMoMap);
+       RecognizedInfo recognizedInfo;
+       if (recognition.new_color=="蓝牌" && recognition.new_plate.length() != 7) {
+           return;
+       } else if (recognition.new_color=="绿牌新能源" && recognition.new_plate.length() != 8) {
+           return;
+       } else if (recognition.new_plate.length() != 7) {
+           return;
+       }
+       if(recognition.text_confidence>=instace.getCarConfidenceMax()){
+           if(value->getQueue().size()>=7 && value->getQueue().size()<=10) {
+               for (int i = 0; i < 3; ++i) {
+                   value->removeQueue();
+               }
+           }
+           for (int var = 0; var < 3; ++var) {
+               RecognizedInfo info(recognition.new_plate,recognition.time,recognition.new_color);
+               value->addQueue(info);
+               recognizedInfo=std::move(info);
+           }
+           this->checkAndUpdateCurrentPlate(value,frame,recognizedInfo,res,exitMoMap);
+       }
+       if(recognition.text_confidence<=instace.getCarConfidenceMin()){
+           return;
+       }
+       if(recognition.text_confidence>instace.getCarConfidenceMin()
+               && recognition.text_confidence<instace.getCarConfidenceMax())
+       {
+           RecognizedInfo info(recognition.new_plate,recognition.time,recognition.new_color);
+           value->addQueue(info);
+           recognizedInfo=std::move(info);
+           this->checkAndUpdateCurrentPlate(value,frame,recognizedInfo,res,exitMoMap);
+       }
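The added gating applies two filters before a plate is processed: a length check (blue plates must have exactly 7 characters, green new-energy plates 8, any other recognition 7) and the confidence bands read from Common (at or below getCarConfidenceMin the hit is discarded, at or above getCarConfidenceMax it is enqueued three times so the queue converges faster, and anything in between is enqueued once). A condensed sketch of the length rule; plateLengthValid is a hypothetical helper, not part of the commit:

    // Hypothetical condensation of the plate-length checks added above.
    static bool plateLengthValid(const QString &color, const QString &plate) {
        if (color == "蓝牌")       return plate.length() == 7;  // blue plate
        if (color == "绿牌新能源") return plate.length() == 8;  // green new-energy plate
        return plate.length() == 7;                             // default rule
    }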
        if (res == Exit || res == Mobilization) {
            recognition.areaLocation=value->getArea();
            recognition.img=imgs;
@@ -604,7 +641,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
            recognition.img=imgs;
            recognition.new_color=recognizedInfo.getColor();
            newPlate.plates.push_back(std::move(recognition));
            RecognizedInfo exitInfo=exitMoMap[Exit];
            vides_data::LicensePlate oldInfo;
            oldInfo.areaLocation=value->getArea();
@@ -623,7 +660,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
            value->addQueue(recognizedInfo);
            int res;
            this->checkAndUpdateCurrentPlate(value, frame, recognizedInfo, res,exitMoMap);
            if (res == Exit || res == Mobilization) {
                vides_data::LicensePlate current;
                current.areaLocation = value->getArea();
@@ -641,7 +678,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
                current.new_color = recognizedInfo.getColor();
                current.new_plate = recognizedInfo.getLicensePlate();
                current.time = recognizedInfo.getRecognizeTime();
                newPlate.plates.push_back(std::move(current));
                RecognizedInfo exitInfo=exitMoMap[Exit];
                vides_data::LicensePlate oldInfo;
@@ -683,7 +720,7 @@ void CameraHandle::pushRecordToCloud(int id, int recognitionType, QString ossUrl
        instace.deleteObj(reco);
    }
    instace.deleteObj(res);
}
void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlate &location){
@@ -696,7 +733,7 @@ void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlat
    HttpService httpService(httpUrl);
    std::list<vides_data::responseRecognitionData> result;
    vides_data::response*resp= httpService.httpLicensePlateRecognition(location,result);
    if (resp->code == 0) {
        if(result.size()==0){
            return ;
@@ -716,7 +753,7 @@ void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlat
        //     data.recognitionType=var.recognitionType;
        //     data.sn=var.sn;
        //     videoCurrentData[var.id]=data;
        //     sdkDownloadFileByTime(this->hDevice,var.id,
        //                           instace.timestampToDateString(var.inTime),instace.timestampToDateString(var.outTime));
        // }
@@ -730,7 +767,7 @@ void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlat
        qInfo()<<"licensePlateRecognitionResults:车牌识别结果失败";
        // Handle the case where the maximum number of retries is reached without success
    }
}
void CameraHandle::sdkDevSystemTimeZoneSyn(QString &time){
@@ -739,7 +776,7 @@ void CameraHandle::sdkDevSystemTimeZoneSyn(QString &time){
    char outBuffer[512] = { 0 };
    int nInOutBufSize = sizeof(outBuffer);
    const char* zoneCfg ="{ \"FirstUserTimeZone\" : \"true\", \"OPTimeSetting\" : \"800\" }";
    int res = XSDK_DevSetSysConfigSyn(hDevice, JK_System_TimeZone, zoneCfg, strlen(zoneCfg), outBuffer, &nInOutBufSize, 5000, EXCMD_CONFIG_GET);
    if(res<0){
        qInfo() << "FirstUserTimeZone:修改失败";
@@ -752,7 +789,7 @@ void CameraHandle::sdkDevSystemTimeZoneSyn(QString &time){
// Recording configuration
void CameraHandle::sdkRecordCfg(const char * recordJson){
    qDebug()<<recordJson;
    char szOutBuffer[512] = { 0 };
    int nLen = sizeof(szOutBuffer);
@@ -780,7 +817,7 @@ void CameraHandle::sdkDevSpvMn(const char *spvMn){
    }
}
bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2) {
    QPolygonF realPolygon;
    realPolygon << QPointF(poly1.getArea().topLeftCornerX, poly1.getArea().topLeftCornerY)
                << QPointF(poly1.getArea().bottomLeftCornerX, poly1.getArea().bottomLeftCornerY)
@@ -788,8 +825,8 @@ bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo &
                << QPointF(poly1.getArea().topRightCornerX, poly1.getArea().topRightCornerY);
    QPainterPath realPath;
    realPath.addPolygon(realPolygon);
    QPolygonF spacePolygon;
    spacePolygon << QPointF(poly2.getArea().topLeftCornerX, poly2.getArea().topLeftCornerY)
                 << QPointF(poly2.getArea().bottomLeftCornerX, poly2.getArea().bottomLeftCornerY)
@@ -797,10 +834,10 @@ bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo &
                 << QPointF(poly2.getArea().topRightCornerX, poly2.getArea().topRightCornerY);
    QPainterPath spacePath;
    spacePath.addPolygon(spacePolygon);
    // Use intersected() to compute the intersection of the two paths
    QPainterPath intersection = realPath.intersected(spacePath);
    // If the intersection is not empty, the polygons overlap
    return !intersection.isEmpty();
}
@@ -808,9 +845,9 @@ bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo &
double CameraHandle::calculateIntersectionArea(const QPolygonF &polygon1, const QPolygonF &polygon2) {
    QPolygonF intersection = polygon1.intersected(polygon2);
    int n = intersection.count();
    if (n < 3) return 0.0;
    // Build the convex hull incrementally
    std::vector<QPointF> convexHullPoints;
    for (const QPointF& point : intersection) {
@@ -819,7 +856,7 @@ double CameraHandle::calculateIntersectionArea(const QPolygonF &polygon1, const
        }
        convexHullPoints.push_back(point);
    }
    double area = 0.0;
    for (size_t i = 0; i < convexHullPoints.size(); ++i) {
        size_t j = (i + 1) % convexHullPoints.size();
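The body of this loop falls outside the hunk; presumably it accumulates the standard shoelace sum over the hull vertices. For reference, a self-contained version of that formula (names are illustrative, not taken from the diff):

    #include <cmath>
    #include <vector>
    #include <QPointF>

    // Shoelace formula: area of a simple polygon given its vertices in order.
    double polygonArea(const std::vector<QPointF> &pts) {
        double sum = 0.0;
        for (size_t i = 0; i < pts.size(); ++i) {
            size_t j = (i + 1) % pts.size();
            sum += pts[i].x() * pts[j].y() - pts[j].x() * pts[i].y();
        }
        return std::abs(sum) / 2.0;
    }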
@@ -858,7 +895,7 @@ int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea){
        for (const auto& point : polygonInfoPoints) {
            qDebug() << "(" << point.x << ", " << point.y << ")";
        }
        std::vector<cv::Point2f> intersection;
        double intersectionArea = cv::intersectConvexConvex(polygonInfoPoints, currentPolygonPoints, intersection, true);
        if (intersectionArea>0.0 && intersectionArea > maxIntersectionArea) {
@@ -866,7 +903,7 @@ int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea){
            areaOfMaxIntersection = info->getSpaceIndex();
        }
    }
    return areaOfMaxIntersection;
}
@@ -879,25 +916,25 @@ int CameraHandle::determineArea(ParkingSpaceInfo &prakArea){
                   << QPoint(prakArea.getArea().bottomLeftCornerX,prakArea.getArea().bottomLeftCornerY)
                   << QPoint(prakArea.getArea().bottomRightCornerX, prakArea.getArea().bottomRightCornerY)
                   << QPoint(prakArea.getArea().topRightCornerX, prakArea.getArea().topRightCornerY);
    for (ParkingSpaceInfo *info : parkingSpaceInfos) {
        QPolygon polygonInfo; // defined inside the loop so it is re-initialized on every iteration
        polygonInfo << QPoint(info->getArea().topLeftCornerX, info->getArea().topLeftCornerY)
                    << QPoint(info->getArea().bottomLeftCornerX, info->getArea().bottomLeftCornerY)
                    << QPoint(info->getArea().bottomRightCornerX, info->getArea().bottomRightCornerY)
                    << QPoint(info->getArea().topRightCornerX, info->getArea().topRightCornerY);
        if (polygonsOverlap(prakArea, *info)) {
            double currentIntersection = calculateIntersectionArea(polygonInfo, currentPolygon);
            if (currentIntersection > maxIntersectionArea) {
                maxIntersectionArea = currentIntersection;
                areaOfMaxIntersection = info->getSpaceIndex();
            }
        }
    }
    return areaOfMaxIntersection;
}
void CameraHandle::initParkingSpaceInfo(const std::list<vides_data::responseArea> &areas){
@@ -907,15 +944,15 @@ void CameraHandle::initParkingSpaceInfo(const std::list<vides_data::responseArea
        vides_data::ParkingArea pArea;
        pArea.bottomLeftCornerX = area->bottom_left_corner_x;
        pArea.bottomLeftCornerY = area->bottom_left_corner_y;
        pArea.topLeftCornerX = area->top_left_corner_x;
        pArea.topLeftCornerY = area->top_left_corner_y;
        pArea.topRightCornerX = area->top_right_corner_x;
        pArea.topRightCornerY = area->top_right_corner_y;
        pArea.bottomRightCornerX = area->bottom_right_corner_x;
        pArea.bottomRightCornerY = area->bottom_right_corner_y;
        info->setArea(pArea);
        if (parkMap.find(index) == parkMap.end()) {
            info->setSpaceIndex(index); // Assuming this method sets the space index
@@ -938,10 +975,10 @@ bool CameraHandle::compareLists(const std::list<vides_data::responseArea>& newAr
    if (newAreas.size() != areas.size()) {
        return false;
    }
    auto itResponse = newAreas.begin();
    auto itParking = areas.begin();
    // Compare each responseArea against the corresponding ParkingArea
    while (itResponse != newAreas.end() && itParking != areas.end()) {
        if (itResponse->bottom_right_corner_x != itParking->bottomRightCornerX ||
@@ -954,11 +991,11 @@ bool CameraHandle::compareLists(const std::list<vides_data::responseArea>& newAr
            itResponse->top_right_corner_y != itParking->topRightCornerY) {
            return false; // return false as soon as any field differs
        }
        ++itResponse;
        ++itParking;
    }
    return true;
}
void CameraHandle::updateParkMapAndParkingSpaceInfos(const std::list<vides_data::responseArea>&newAreas){
...
@@ -21,6 +21,7 @@
#include <QDateTime>
#include <QJsonDocument>
#include <memory>
+#include <functional>
#include <QString>
#include <QObject>
#include <QTimer>
@@ -28,9 +29,7 @@
#include <QQueue>
#include <opencv2/opencv.hpp>
#include <QSemaphore>
+#include <atomic>
enum CAR_INFORMATION {
    Exit,         // exit
    Mobilization, // entry
@@ -46,25 +45,25 @@ public:
    int sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
    //int SdkMediaGetFaceImage(int hDevice, int nSeq, int nTimeout);
    int sdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener);
    int getHdevice();
    int getChannel();
    void clearCameraHandle();
    // void rebindTimer(int hDevice);
-   void initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer);
+   void initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 face_frequency);
    void updateImage(const cv::Mat & frame,qint64 currentTime);
    void matToBase64(const cv::Mat &image, QByteArray &base64Data);
    int callbackFunction(XSDK_HANDLE hObject,QString &szString);
    void checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat & frame,RecognizedInfo& newInfo,int &result,std::map<int,RecognizedInfo>&exitAndMoMap);
    void licensePlateRecognitionResults(vides_data::requestLicensePlate &location);
    void sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel);
    // Time settings
    void sdkDevSystemTimeZoneSyn(QString &time);
@@ -74,12 +73,11 @@ public:
    void sdkEncodeCfg(const char *enCode);
    // GB28181 update
    void sdkDevSpvMn(const char* spvMn);
    void sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
                               QString startTimer,QString endTime);
    void setTimeoutMs(int timeoutMs);
-   bool acquireAndReleaseWithTimeout(bool flag);
    QString getSSn();
@@ -87,7 +85,7 @@ public:
    void setMediaHandle(int mediaHandle);
    void setCurrentFace(int currentFace);
    void initParkingSpaceInfo(const std::list<vides_data::responseArea>&areas);
    bool compareLists(const std::list<vides_data::responseArea>& newAreas);
@@ -108,23 +106,24 @@ public:
    int determineArea(ParkingSpaceInfo &prakArea);
signals:
    void callbackFrameReady(const cv::Mat &frame, const QString &url);
    void afterDownloadFile( int id,int recognitionType,QString ossUrl);
private slots:
    void sdkRealTimeDevSnapSyn(int hDevice);
    void pushRecordToCloud(int id,int recognitionType,QString ossUrl);
-   void releaseSemaphore();
+   //void releaseSemaphore();
private :
    int hDevice;
    int channel;
    QString httpUrl;
    SXSDKLoginParam *loginParam;
    SXMediaFaceImageReq *sxMediaFaceImageReq;
    std::mutex plateMutex;
    std::mutex faceMutex;
    QString sSn;
    QString url;
@@ -143,8 +142,6 @@ private :
    // Snapshot every 2 seconds
    QTimer *dev_snap_syn_timer;
-   QTimer *release_timer;
    int offlineCount=0;
    TCV_HumanDetector *detector;
@@ -154,10 +151,12 @@ private :
    QSemaphore semaphore;
    int timeoutMs;
    int image_save;
+   std::atomic<uint64> faceCount;
+   uint64 face_frequency;
};
#endif // CAMERAHANDLE_H
@@ -63,6 +63,19 @@ void Common::setImages(QString images){
    this->images=images;
}
+float Common::getCarConfidenceMax() const{
+    return carConfidenceMax;
+}
+void Common::setCarConfidenceMax(float carConfidenceMax){
+    this->carConfidenceMax=carConfidenceMax;
+}
+float Common::getCarConfidenceMin() const{
+    return carConfidenceMin;
+}
+void Common::setCarConfidenceMin(float carConfidenceMin){
+    this->carConfidenceMin=carConfidenceMin;
+}
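A usage sketch for the new thresholds; the 0.90/0.60 values and the place where they are configured are hypothetical, not part of this commit:

    // Hypothetical startup wiring for the plate-confidence bands used in updateImage().
    Common &common = Common::getInstance();
    common.setCarConfidenceMax(0.90f); // at or above this, a hit is enqueued three times
    common.setCarConfidenceMin(0.60f); // at or below this, a hit is discarded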
QString Common::GetLocalIp() {
    QString ipAddress;
    QList<QHostAddress> list = QNetworkInterface::allAddresses();
...
@@ -44,6 +44,11 @@ public:
    QString GetLocalIp();
+   float getCarConfidenceMax() const;
+   void setCarConfidenceMax(float carConfidenceMax);
+   float getCarConfidenceMin() const;
+   void setCarConfidenceMin(float carConfidenceMin);
    template<typename T>
    void deleteObj(T*& obj) {
@@ -57,6 +62,8 @@ private:
    QString videoOut;
    QString videoDownload;
    QString images;
+   float carConfidenceMax;
+   float carConfidenceMin;
    Common();
    ~Common();
...
@@ -6,19 +6,18 @@
#include "herror.h"
#include "LogHandle.h"
#include "VidesData.h"
-#include <mutex>
class FaceReconition
{
private:
    static FaceReconition* instance;
-   HContextHandle ctxHandle;
+   HContextHandle ctxHandle=nullptr;
    float configConfidence;
    std::vector<int32_t>customIds;
    FaceReconition();
    ~FaceReconition();
...
#include "FaceRecognition.h" #include "FaceRecognition.h"
#include <QThread>
FaceReconition::FaceReconition() {} FaceReconition::FaceReconition() {}
@@ -23,23 +25,21 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,float con
    QString bPath = QCoreApplication::applicationDirPath() + "/model_zip/T1_5";
#else
#error "不支持的架构"
#endif
    QByteArray && bypath = bPath.toUtf8();
    char* spath = bypath.data();
    HString path = spath;
    HInt32 option = HF_ENABLE_QUALITY | HF_ENABLE_FACE_RECOGNITION | HF_ENABLE_MASK_DETECT;
    HF_DetectMode detMode = HF_DETECT_MODE_IMAGE; // image mode: always run detection
-   // create the ctx
-   ret = HF_CreateFaceContextFromResourceFileOptional(path, option, detMode, 5, &ctxHandle);
-   if (ret != HSUCCEED) {
-       qInfo() << QString("Create ctx error: %1").arg(ret);
-       return;
-   }
-   // ret = HF_FaceRecognitionThresholdSetting(ctxHandle, 0.36);
-   // if (ret != HSUCCEED) {
-   //     qInfo() << QString("HF_FaceRecognitionThresholdSetting error: %1").arg(ret);
-   //     return;
-   // }
+   if(ctxHandle==nullptr){
+       // create the ctx only once
+       ret = HF_CreateFaceContextFromResourceFileOptional(path, option, detMode, 5, &ctxHandle);
+       if (ret != HSUCCEED) {
+           qInfo() << QString("Create ctx error: %1").arg(ret);
+           return;
+       }
+   }
    customIds.clear();
    int i = 0;
    for (auto it = maps.begin(); it != maps.end(); ++it,++i) {
@@ -57,7 +57,7 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,float con
        imageData.width = image.cols;
        imageData.rotation = VIEW_ROTATION_0;
        imageData.format = FORMAT_BGR;
        HImageHandle imageSteamHandle;
        ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
        this->configConfidence=confidence;
@@ -66,25 +66,25 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,float con
            HF_ReleaseImageStream(imageSteamHandle); // release the stream
            return;
        }
        HF_MultipleFaceData multipleFaceData = {0};
        HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
        if (multipleFaceData.detectedNum <= 0) {
            qInfo() << QString("initSourceImageMap:未检测到人脸: %1").arg(key);
            HF_ReleaseImageStream(imageSteamHandle); // release the stream
            return;
        }
        HF_FaceFeature feature = {0};
        ret = HF_FaceFeatureExtract(ctxHandle, imageSteamHandle, multipleFaceData.tokens[0], &feature);
        if (ret != HSUCCEED) {
            qInfo() << QString("特征提取出错: %1").arg(ret);
            HF_ReleaseImageStream(imageSteamHandle); // release the stream
            return;
        }
        char* tagName = new char[key.size() + 1];
        std::strcpy(tagName, key.toStdString().c_str());
        HF_FaceFeatureIdentity identity = {0};
@@ -92,16 +92,16 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,float con
        identity.customId = i;
        customIds.push_back( identity.customId);
        identity.tag = tagName;
        ret = HF_FeaturesGroupInsertFeature(ctxHandle, identity);
        if (ret != HSUCCEED) {
            qInfo() << QString("插入失败: %1").arg(ret);
            HF_ReleaseImageStream(imageSteamHandle); // release the stream
            return;
        }
        delete[] tagName;
        ret = HF_ReleaseImageStream(imageSteamHandle);
        if (ret == HSUCCEED) {
            imageSteamHandle = nullptr;
@@ -118,8 +118,6 @@ int FaceReconition::featureRemove(){
        HResult ret= HF_FeaturesGroupFeatureRemove(ctxHandle,customId);
        qDebug()<<"ret:featureRemove "<<ret;
    }
-   HF_ReleaseFaceContext(ctxHandle);
    }
}
void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
...@@ -131,7 +129,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d ...@@ -131,7 +129,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
imageData.width = source.cols; imageData.width = source.cols;
imageData.rotation = VIEW_ROTATION_0; imageData.rotation = VIEW_ROTATION_0;
imageData.format = FORMAT_BGR; imageData.format = FORMAT_BGR;
HImageHandle imageSteamHandle; HImageHandle imageSteamHandle;
ret = HF_CreateImageStream(&imageData, &imageSteamHandle); ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
if (ret != HSUCCEED) { if (ret != HSUCCEED) {
...@@ -140,12 +138,12 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d ...@@ -140,12 +138,12 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
} }
HF_MultipleFaceData multipleFaceData = {0}; HF_MultipleFaceData multipleFaceData = {0};
HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData); HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
if (multipleFaceData.detectedNum <= 0) { if (multipleFaceData.detectedNum <= 0) {
qDebug()<<QString("search 未检测到人脸"); qDebug()<<QString("search 未检测到人脸");
return ; return ;
} }
std::vector<std::vector<float>> features; std::vector<std::vector<float>> features;
// 被搜索的目标这边推荐使用拷贝式的接口来获取特征向量 // 被搜索的目标这边推荐使用拷贝式的接口来获取特征向量
HInt32 featureNum; HInt32 featureNum;
...@@ -174,7 +172,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d ...@@ -174,7 +172,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
qInfo()<<QString("搜索失败: %1").arg(ret); qInfo()<<QString("搜索失败: %1").arg(ret);
return ; return ;
} }
qDebug()<<QString("搜索置信度: %1").arg(confidence); qDebug()<<QString("搜索置信度: %1").arg(confidence);
qDebug()<<QString("匹配到的tag: %1").arg(searchIdentity.tag); qDebug()<<QString("匹配到的tag: %1").arg(searchIdentity.tag);
qDebug()<<QString("匹配到的customId: %1").arg(searchIdentity.customId); qDebug()<<QString("匹配到的customId: %1").arg(searchIdentity.customId);
...@@ -193,7 +191,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d ...@@ -193,7 +191,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
} }
//printf("活体置信度: %f", livenessConfidence.confidence[0]); //printf("活体置信度: %f", livenessConfidence.confidence[0]);
qDebug()<<QString("活体置信度====>:%1").arg(livenessConfidence.confidence[0],0,'Q',4); qDebug()<<QString("活体置信度====>:%1").arg(livenessConfidence.confidence[0],0,'Q',4);
HF_FaceMaskConfidence maskConfidence = {0}; HF_FaceMaskConfidence maskConfidence = {0};
ret = HF_GetFaceMaskConfidence(ctxHandle, &maskConfidence); ret = HF_GetFaceMaskConfidence(ctxHandle, &maskConfidence);
if (ret != HSUCCEED) { if (ret != HSUCCEED) {
...@@ -217,7 +215,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d ...@@ -217,7 +215,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
newface.height=multipleFaceData.rects[rect].height; newface.height=multipleFaceData.rects[rect].height;
faces.push_back(newface); faces.push_back(newface);
} }
rect++; rect++;
} }
ret = HF_ReleaseImageStream(imageSteamHandle); ret = HF_ReleaseImageStream(imageSteamHandle);
......
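The repeated HF_ReleaseImageStream() calls before every early return in the hunks above could be collapsed with the RAII guard this commit adds further down (ScopeSemaphoreExit). The fragment below is an editor's sketch, not part of the commit; the HF_* declarations and HSUCCEED come from the face SDK headers this file already uses, and imageData is the structure filled a few lines earlier in initSourceImageMap.

    // Sketch only: register the release once, right after the stream is created,
    // so every early return (no face, extract error, insert error) still frees it.
    #include "ScopeSemaphoreExit.h"

    HImageHandle imageSteamHandle;
    HResult ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
    if (ret != HSUCCEED) {
        return;
    }
    ScopeSemaphoreExit streamGuard([&imageSteamHandle]() {
        HF_ReleaseImageStream(imageSteamHandle);   // runs on every exit path of this scope
    });
    // ... face track / feature extract / group insert, each failure can simply `return;`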
@@ -18,6 +18,7 @@ int HumanDetection::findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector
    int num = (res == 0) ? TCV_HumanDetectorGetNumOfHuman(detector) :TCV_HumanDetectorGetNumOfCar(detector);
    qDebug() << (res == 0 ? "Number of people detected:" : "Number of cars detected:") << num;
+   TCV_ReleaseCameraStream(stream);
    return num;
}
@@ -131,6 +131,7 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
    newPlate.time=currentTime;
    newPlate.new_color=QString::fromStdString(type);
    newPlate.new_plate=QString::fromUtf8(results.plates[i].code);
+   newPlate.text_confidence=results.plates[i].text_confidence;
    vides_data::ParkingArea area;
    area.topLeftCornerX=results.plates[i].x1;
    area.topLeftCornerY=results.plates[i].y1;
......
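The commit title also mentions length limits for green and blue plates, but the hunk above only shows the new text_confidence field. The helper below is therefore an editor's sketch of such a check, not the project's code; the helper name and the rules (7 characters for blue plates, 8 for new-energy green plates) are assumptions.

    #include <QString>

    // Hypothetical helper: reject recognition results whose plate length does not
    // match the plate colour reported by the recogniser.
    static bool plateLengthMatchesColor(const QString &plate, const QString &color)
    {
        const int len = plate.length();
        if (color.contains(QStringLiteral("蓝")))   // blue plate: 7 characters
            return len == 7;
        if (color.contains(QStringLiteral("绿")))   // green (new-energy) plate: 8 characters
            return len == 8;
        return len == 7 || len == 8;                // other colours: accept either length
    }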
// ScopeSemaphoreExit.cpp (new file in this commit)
#include "ScopeSemaphoreExit.h"

ScopeSemaphoreExit::ScopeSemaphoreExit(std::function<void()> onExit)
    : onExit_(onExit) {}

ScopeSemaphoreExit::~ScopeSemaphoreExit() {
    if (onExit_) onExit_();   // run the registered cleanup when the guard leaves scope
}

// ScopeSemaphoreExit.h (new file in this commit)
#ifndef SCOPESEMAPHOREEXIT_H
#define SCOPESEMAPHOREEXIT_H
#include <functional>

// RAII helper: stores a callable and invokes it in the destructor, so cleanup
// such as releasing a semaphore happens on every exit path of a scope.
class ScopeSemaphoreExit {
public:
    explicit ScopeSemaphoreExit(std::function<void()> onExit);
    ~ScopeSemaphoreExit();
private:
    std::function<void()> onExit_;
};

#endif // SCOPESEMAPHOREEXIT_H
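The guard simply runs the stored callable in its destructor, which is what the commit title's "信号自动析构" (automatic release on destruction) refers to. A usage sketch follows; the function and variable names are illustrative only, not taken from the repository.

    #include <QSemaphore>
    #include "ScopeSemaphoreExit.h"

    // Illustrative only: the semaphore is released on every exit path of the
    // function, including early returns, without repeating release() calls.
    void handleSnapshot(QSemaphore &semaphore)
    {
        if (!semaphore.tryAcquire())
            return;                            // a previous snapshot is still being processed
        ScopeSemaphoreExit guard([&semaphore]() {
            semaphore.release();               // runs when guard goes out of scope
        });
        // ... decode the frame, run detection, push results ...
    }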
@@ -133,6 +133,7 @@ struct LicensePlate
    QByteArray img;
    qint64 time;
    ParkingArea recognition;
+   float text_confidence;
    LicensePlate() {}
};
......
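With text_confidence now carried on each LicensePlate, a natural use is to gate results against the car-confidence window that initCommon() stores later in this diff. The sketch below assumes Common exposes matching getters; only setCarConfidenceMax/Min appear in the commit, so getCarConfidenceMin/Max are hypothetical names.

    // Assumed getters; only the setters are visible in this commit.
    static bool plateConfidenceAccepted(const vides_data::LicensePlate &plate)
    {
        Common &instance = Common::getInstance();
        const float minConf = instance.getCarConfidenceMin();   // assumption
        const float maxConf = instance.getCarConfidenceMax();   // assumption
        return plate.text_confidence >= minConf && plate.text_confidence <= maxConf;
    }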
@@ -93,7 +93,8 @@ SOURCES += \
    TaskRunnable.cpp \
    CameraHandle.cpp \
    ParkingSpaceInfo.cpp \
-   HumanDetection.cpp
+   HumanDetection.cpp \
+   ScopeSemaphoreExit.cpp

HEADERS += \
    Common.h \
@@ -109,7 +110,8 @@ HEADERS += \
    TaskRunnable.h \
    CameraHandle.h \
    ParkingSpaceInfo.h \
-   HumanDetection.h
+   HumanDetection.h \
+   ScopeSemaphoreExit.h

#FORMS += \
#   mainwindow.ui
......
@@ -15,7 +15,7 @@ MainWindow::MainWindow()
    modelPaths=qSetting->value("licensePlateRecognition/model_paths").toString();
-   initVideoOutPath();
+   initCommon();
    deleteLogFileTimer =new QTimer(this);
    connect(deleteLogFileTimer, &QTimer::timeout, this, &MainWindow::deleteLogFile);
@@ -301,18 +301,28 @@ void MainWindow::startCamera(const QString &httpurl){
    Common & instace= Common::getInstance();
    MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
    QString nonConstHttpUrl = std::remove_const<QString>::type(httpurl);
    vides_data::responseDeviceData devices;
    // QString serialNumber = QSysInfo::machineUniqueId();
    QString serialNumber;
    findLocalSerialNumber(serialNumber);
+   vides_data::requestDeviceStatus reStatus;
+   reStatus.sSn=serialNumber;
+   reStatus.status=1;
+   reStatus.type=1;
+   reStatus.ip_addr=instace.GetLocalIp();
    HttpService httpService(httpurl);
+   vides_data::response *res=httpService.httpPostDeviceStatus(reStatus);
+   if(res->code!=0){
+       qInfo()<<"盒子状态上报失败 code:"<<res->code<<"msg:"<<res->data;
+   }
+   instace.deleteObj(res);
+   httpService.setHttpUrl(httpurl);
    vides_data::response *re= httpService.httpFindCameras(serialNumber,devices);
-   if(re->code==0){
+   if(re->code==0 || re->code==20004){
+       if(devices.list.size()<=0){
+           instace.deleteObj(re);
+           return;
+       }
        QString username = qSetting->value("devices/username").toString();
        QString password = qSetting->value("devices/password").toString();
        std::map<QString,vides_data::localDeviceStatus*> localDevices;
@@ -329,7 +339,7 @@ void MainWindow::startCamera(const QString &httpurl){
        QString key = ipAddress + ":" + QString::number(localDevice->TCPPort);
        if(faceDetectionParkingPushs.count(key)<=0){
            httpService.setHttpUrl(httpurl);
            vides_data::cameraParameters parameter;
            parameter.sDevId=ipAddress;
            parameter.nDevPort=localDevice->TCPPort;
@@ -354,7 +364,7 @@ void MainWindow::startCamera(const QString &httpurl){
        }
    }
    this->deleteCloudNotCamer(localDevices, devices.list);
    for (auto& pair : localDevices) {
        if (pair.second != nullptr) { // 如果对象未被删除(即不为nullptr)
            instace.deleteObj(pair.second);
@@ -363,24 +373,10 @@ void MainWindow::startCamera(const QString &httpurl){
    // 清空 localDevices 容器
    localDevices.clear();
    }
-   vides_data::requestDeviceStatus reStatus;
-   reStatus.sSn=serialNumber;
-   reStatus.status=1;
-   reStatus.type=1;
-   reStatus.ip_addr=instace.GetLocalIp();
-   qDebug()<<"local.ip_addr===>"<<reStatus.ip_addr;
-   httpService.setHttpUrl(httpurl);
-   qDebug()<<"httpurl===>"<<httpurl;
-   qDebug()<<"serialNumber===>"<<serialNumber;
-   vides_data::response *res=httpService.httpPostDeviceStatus(reStatus);
-   if(res->code!=0){
-       qInfo()<<"盒子状态上报失败 code:"<<res->code<<"msg:"<<res->data;
-   }
    updateLocalFace(httpurl);
    instace.deleteObj(re);
-   instace.deleteObj(res);
}

bool MainWindow::isDeviceInList(const QString& deviceId, const std::list<vides_data::responseDeviceStatus>& devices) {
@@ -718,7 +714,9 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
    cameraHandle->sdkDevSetAlarmListener(sdk_handle,1);
    int synTime=qSetting->value("timer/dev_snap_syn_timer").toInt();
-   cameraHandle->initSdkRealTimeDevSnapSyn(sdk_handle,synTime);
+   uint64 face_frequency=qSetting->value("devices/face_frequency").toULongLong();
+   cameraHandle->initSdkRealTimeDevSnapSyn(sdk_handle,synTime,face_frequency);
    int seTime=qSetting->value("timer/semaphore_time").toInt();
    cameraHandle->setTimeoutMs(seTime);
    cameraHandle->initParkingSpaceInfo(areas);
@@ -777,11 +775,15 @@ void MainWindow::createDirectory(int flag,const QString& dirName, const QString&
    }
}

-void MainWindow::initVideoOutPath(){
+void MainWindow::initCommon(){
    createDirectory(0x01,"frame_images", "目录创建成功", "目录创建失败");
    createDirectory(0x00,"frame_video", "创建视频目录成功", "视频目录创建失败");
    createDirectory(0x02,"images", "图片目录创建成功", "图片目录创建失败");
+   float carConfidenceMax=qSetting->value("devices/carConfidenceMax").toFloat();
+   float carConfidenceMin=qSetting->value("devices/carConfidenceMin").toFloat();
+   Common& instance = Common::getInstance();
+   instance.setCarConfidenceMax(carConfidenceMax);
+   instance.setCarConfidenceMin(carConfidenceMin);
}

MainWindow::~MainWindow()
......
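startCamera() and initCameras() now read several new settings keys (devices/face_frequency, devices/carConfidenceMax, devices/carConfidenceMin, timer/semaphore_time) through qSetting. The standalone sketch below shows the same reads with QSettings; the INI path and the default values are placeholders chosen by the editor, only the key names come from the hunks above.

    #include <QSettings>
    #include <QDebug>

    // Placeholder path and defaults; key names are taken from this commit.
    void printCommitSettings(const QString &iniPath)
    {
        QSettings settings(iniPath, QSettings::IniFormat);
        const quint64 faceFrequency   = settings.value("devices/face_frequency", 10).toULongLong();
        const float carConfidenceMax  = settings.value("devices/carConfidenceMax", 0.9f).toFloat();
        const float carConfidenceMin  = settings.value("devices/carConfidenceMin", 0.6f).toFloat();
        const int semaphoreTimeMs     = settings.value("timer/semaphore_time", 3000).toInt();
        qDebug() << faceFrequency << carConfidenceMax << carConfidenceMin << semaphoreTimeMs;
    }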
@@ -34,12 +34,13 @@ class MainWindow : public QObject
public:
    explicit MainWindow();
-   void initVideoOutPath();
+   void initCommon();
    void setVideoPath(int flag, const QString& path);
    void createDirectory(int flag,const QString& dirName, const QString& successMsg, const QString& failureMsg);
    void initFaceFaceRecognition();
    void initCameras(vides_data::cameraParameters &parameter,const std::list<vides_data::responseArea>&areas);
......