Commit 784d6dae by “liusq”

新增算法权限和推送设备版本,ip

parent 264b6d6a
...@@ -4,7 +4,7 @@ ...@@ -4,7 +4,7 @@
#include "ScopeSemaphoreExit.h" #include "ScopeSemaphoreExit.h"
#include <QRegularExpression> #include <QRegularExpression>
CameraHandle::CameraHandle(){ CameraHandle::CameraHandle(){
} }
CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &channel,const QString &modelPaths, float carConfidence,int imageSave) CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &channel,const QString &modelPaths, float carConfidence,int imageSave)
: hDevice(-1), : hDevice(-1),
...@@ -18,8 +18,8 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch ...@@ -18,8 +18,8 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch
image_save(imageSave), image_save(imageSave),
semaphore(1) { semaphore(1) {
connect(this, SIGNAL(afterDownloadFile(int,int,QString)), this, SLOT(pushRecordToCloud(int,int,QString)),Qt::QueuedConnection); connect(this, SIGNAL(afterDownloadFile(int,int,QString)), this, SLOT(pushRecordToCloud(int,int,QString)),Qt::QueuedConnection);
detector = TCV_CreateHumanDetector(); detector = TCV_CreateHumanDetector(1);
// 设置检测得分阈值 默认0.5
TCV_HumanDetectorSetHumanThreshold(detector,0.5f); TCV_HumanDetectorSetHumanThreshold(detector,0.5f);
TCV_HumanDetectorSetCarThreshold(detector,0.2f); TCV_HumanDetectorSetCarThreshold(detector,0.2f);
...@@ -35,7 +35,7 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch ...@@ -35,7 +35,7 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch
configuration.box_conf_threshold = 0.30f; configuration.box_conf_threshold = 0.30f;
configuration.threads = 1; configuration.threads = 1;
ctx = HLPR_CreateContext(&configuration); ctx = HLPR_CreateContext(&configuration);
} }
CameraHandle::~CameraHandle() { CameraHandle::~CameraHandle() {
...@@ -53,22 +53,22 @@ CameraHandle::~CameraHandle() { ...@@ -53,22 +53,22 @@ CameraHandle::~CameraHandle() {
} }
parkMap.clear(); parkMap.clear();
QThreadPool::globalInstance()->waitForDone(); QThreadPool::globalInstance()->waitForDone();
} }
int CameraHandle::sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout) { int CameraHandle::sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout) {
QByteArray byteArray = sDevId.toUtf8(); QByteArray byteArray = sDevId.toUtf8();
char* cDevid=byteArray.data(); char* cDevid=byteArray.data();
strcpy(loginParam->sDevId, cDevid); strcpy(loginParam->sDevId, cDevid);
loginParam->nDevPort=nDevPort; loginParam->nDevPort=nDevPort;
QByteArray byteName = sUserName.toUtf8(); QByteArray byteName = sUserName.toUtf8();
char* cName=byteName.data(); char* cName=byteName.data();
strcpy(loginParam->sUserName, cName); strcpy(loginParam->sUserName, cName);
if(sPassword.length()>0){ if(sPassword.length()>0){
QByteArray bytePassword = sPassword.toUtf8(); QByteArray bytePassword = sPassword.toUtf8();
strcpy(loginParam->sPassword, bytePassword.constData()); strcpy(loginParam->sPassword, bytePassword.constData());
...@@ -97,17 +97,17 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n ...@@ -97,17 +97,17 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
// 帧信息 // 帧信息
SXSDK_FRAME_INFO* pFrame = (SXSDK_FRAME_INFO*)pDataInfo; SXSDK_FRAME_INFO* pFrame = (SXSDK_FRAME_INFO*)pDataInfo;
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance(); MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
if (pFrame->nType == XSDK_FRAME_TYPE_VIDEO && pFrame->nSubType == XSDK_FRAME_TYPE_VIDEO_I_FRAME) if (pFrame->nType == XSDK_FRAME_TYPE_VIDEO && pFrame->nSubType == XSDK_FRAME_TYPE_VIDEO_I_FRAME)
{ {
//printf("[%d]::OnFrame[Len:%d][Type:%d/%d][%04d-%02d-%02d %02d:%02d:%02d-%03d]\r\n", hMedia, pFrame->nLength, pFrame->nType, pFrame->nSubType, pFrame->nYear, pFrame->nMonth, pFrame->nDay, pFrame->nHour, pFrame->nMinute, pFrame->nSecond, (int)(pFrame->nTimeStamp % 1000)); //printf("[%d]::OnFrame[Len:%d][Type:%d/%d][%04d-%02d-%02d %02d:%02d:%02d-%03d]\r\n", hMedia, pFrame->nLength, pFrame->nType, pFrame->nSubType, pFrame->nYear, pFrame->nMonth, pFrame->nDay, pFrame->nHour, pFrame->nMinute, pFrame->nSecond, (int)(pFrame->nTimeStamp % 1000));
} }
if (cameraHandle->getMediaHandle() > 0 && cameraHandle->getMediaHandle()== hMedia) if (cameraHandle->getMediaHandle() > 0 && cameraHandle->getMediaHandle()== hMedia)
{ {
if (pFrame->nSubType == XSDK_ENCODE_VIDEO_JPEG) if (pFrame->nSubType == XSDK_ENCODE_VIDEO_JPEG)
{ {
mediaFaceImage->AbFile(cFname, pFrame->pContent, pFrame->nFrameLength); mediaFaceImage->AbFile(cFname, pFrame->pContent, pFrame->nFrameLength);
} }
else else
...@@ -118,7 +118,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n ...@@ -118,7 +118,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
mediaFaceImage->AbFile(cFname, pFrame->pHeader, pFrame->nLength); mediaFaceImage->AbFile(cFname, pFrame->pHeader, pFrame->nLength);
} }
} }
} }
else if (ESXSDK_MEDIA_START_REAL_PLAY == nDataType else if (ESXSDK_MEDIA_START_REAL_PLAY == nDataType
|| ESXSDK_MEDIA_START_FACE_IMAGE == nDataType || ESXSDK_MEDIA_START_FACE_IMAGE == nDataType
...@@ -129,7 +129,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n ...@@ -129,7 +129,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
) )
{ {
int& nResult = nDataLen; int& nResult = nDataLen;
} }
else if (EXCMD_MONITOR_DATA == nDataType else if (EXCMD_MONITOR_DATA == nDataType
|| EXCMD_DOWNLOAD_DATA == nDataType || EXCMD_DOWNLOAD_DATA == nDataType
...@@ -143,16 +143,16 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n ...@@ -143,16 +143,16 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
{ {
printf("[%d]::OnMedia[%d][DataLen:%d]\r\n", hMedia, nDataType, nDataLen); printf("[%d]::OnMedia[%d][DataLen:%d]\r\n", hMedia, nDataType, nDataLen);
}*/ }*/
if (EXCMD_DOWNLOAD_DATA == nDataType) if (EXCMD_DOWNLOAD_DATA == nDataType)
{ {
qDebug()<<"EXCMD_DOWNLOAD_DATA"<<nDataType; qDebug()<<"EXCMD_DOWNLOAD_DATA"<<nDataType;
/*if (g_hRecordDownload > 0 && g_hRecordDownload == hMedia) /*if (g_hRecordDownload > 0 && g_hRecordDownload == hMedia)
{ {
std::string::size_type pos = g_test.sDownloadFileName.rfind('.'); std::string::size_type pos = g_test.sDownloadFileName.rfind('.');
std::string strSuffix = g_test.sDownloadFileName.substr(pos + 1, pos + 2); std::string strSuffix = g_test.sDownloadFileName.substr(pos + 1, pos + 2);
if (STRCMP(strSuffix.c_str(), "jpg") == 0) if (STRCMP(strSuffix.c_str(), "jpg") == 0)
{ {
const char* pWriteData = (const char*)pData; const char* pWriteData = (const char*)pData;
...@@ -160,10 +160,10 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n ...@@ -160,10 +160,10 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
{ {
pWriteData += 16; pWriteData += 16;
nDataLen -= 16; nDataLen -= 16;
++stInit; ++stInit;
} }
XFILE::ABFile(g_test.sDownloadFileName.c_str(), pWriteData, nDataLen); XFILE::ABFile(g_test.sDownloadFileName.c_str(), pWriteData, nDataLen);
} }
else else
...@@ -181,7 +181,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n ...@@ -181,7 +181,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
{ {
//int& nState = nDataLen; //int& nState = nDataLen;
//printf("[%d]::OnMediaStateChannged[nState:%d]\r\n", hMedia, nState); //printf("[%d]::OnMediaStateChannged[nState:%d]\r\n", hMedia, nState);
if (nDataLen == EState_Media_DataEnd) if (nDataLen == EState_Media_DataEnd)
{ {
if (cameraHandle->getMediaHandle() > 0) if (cameraHandle->getMediaHandle() > 0)
...@@ -191,11 +191,11 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n ...@@ -191,11 +191,11 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
QString mp4FileName =dName; QString mp4FileName =dName;
mp4FileName.replace(QRegularExpression("\\.[^\\.]*$"), ".mp4"); mp4FileName.replace(QRegularExpression("\\.[^\\.]*$"), ".mp4");
data["downloadFileName"]=mp4FileName; data["downloadFileName"]=mp4FileName;
QProcess ffmpegProcess; QProcess ffmpegProcess;
QStringList arguments; QStringList arguments;
arguments << "-i" << dName <<"-c:v" << "copy" << mp4FileName; arguments << "-i" << dName <<"-c:v" << "copy" << mp4FileName;
ffmpegProcess.start("ffmpeg", arguments); ffmpegProcess.start("ffmpeg", arguments);
// 等待 ffmpeg 进程结束 // 等待 ffmpeg 进程结束
if (ffmpegProcess.waitForFinished(20000)) { if (ffmpegProcess.waitForFinished(20000)) {
...@@ -204,7 +204,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n ...@@ -204,7 +204,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
} else { } else {
qDebug() << "Error: ffmpeg process did not finish."; qDebug() << "Error: ffmpeg process did not finish.";
} }
// 销毁 QProcess 对象 // 销毁 QProcess 对象
ffmpegProcess.close(); ffmpegProcess.close();
QFileInfo fileInfo(mp4FileName); QFileInfo fileInfo(mp4FileName);
...@@ -220,7 +220,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n ...@@ -220,7 +220,7 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
} }
} }
} }
} }
int CameraHandle::sdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener) { int CameraHandle::sdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener) {
return XSDK_DevSetAlarmListener(hDevice,bListener); return XSDK_DevSetAlarmListener(hDevice,bListener);
...@@ -247,18 +247,18 @@ void CameraHandle::initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 fa ...@@ -247,18 +247,18 @@ void CameraHandle::initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 fa
dev_snap_syn_timer->start(syn_timer); dev_snap_syn_timer->start(syn_timer);
} }
void CameraHandle::sdkRealTimeDevSnapSyn(int hDevice) { void CameraHandle::sdkRealTimeDevSnapSyn(int hDevice) {
QThreadPool* threadPool = QThreadPool::globalInstance(); QThreadPool* threadPool = QThreadPool::globalInstance();
threadPool->setMaxThreadCount(8); threadPool->setMaxThreadCount(8);
auto taskSyn = std::bind(&CameraHandle::sdkDevSnapSyn, this, hDevice, this->channel); auto taskSyn = std::bind(&CameraHandle::sdkDevSnapSyn, this, hDevice, this->channel);
auto taskRunnable = new TaskRunnable(taskSyn, hDevice, this->channel, RunFunction::SdkDevSnapSyn); auto taskRunnable = new TaskRunnable(taskSyn, hDevice, this->channel, RunFunction::SdkDevSnapSyn);
threadPool->start(taskRunnable); threadPool->start(taskRunnable);
} }
QString CameraHandle::getSSn(){ QString CameraHandle::getSSn(){
return sSn; return sSn;
...@@ -296,12 +296,12 @@ void CameraHandle::sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id, ...@@ -296,12 +296,12 @@ void CameraHandle::sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
newSn.append(szTime); newSn.append(szTime);
//downloadFileName=QString("%1/%2.h264").arg(videoPath, szTime); //downloadFileName=QString("%1/%2.h264").arg(videoPath, szTime);
currentData["downloadFileName"] =QString("%1%2.h264").arg(videoPath, newSn); currentData["downloadFileName"] =QString("%1%2.h264").arg(videoPath, newSn);
SXMediaRecordReq param = { 0 }; SXMediaRecordReq param = { 0 };
QByteArray bStart =startTimer.toUtf8(); QByteArray bStart =startTimer.toUtf8();
const char* cStart=bStart.data(); const char* cStart=bStart.data();
QByteArray bEnd=endTime.toUtf8(); QByteArray bEnd=endTime.toUtf8();
const char* cEnd=bEnd.data(); const char* cEnd=bEnd.data();
strcpy(param.sBeginTime, cStart); // 请求的开始时间(必填) strcpy(param.sBeginTime, cStart); // 请求的开始时间(必填)
...@@ -313,16 +313,16 @@ void CameraHandle::sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id, ...@@ -313,16 +313,16 @@ void CameraHandle::sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
SMsgReceiver sms(nullptr,XNetSDK_MediaCallBack,this); SMsgReceiver sms(nullptr,XNetSDK_MediaCallBack,this);
param.result=sms; param.result=sms;
qDebug() << "XSDK_MediaRecordDownload hDevice:"<<this->hDevice; qDebug() << "XSDK_MediaRecordDownload hDevice:"<<this->hDevice;
this->mediaHandle = XSDK_MediaRecordDownload(this->hDevice, &param, 0, 4000); this->mediaHandle = XSDK_MediaRecordDownload(this->hDevice, &param, 0, 4000);
if ( this->mediaHandle < 0) if ( this->mediaHandle < 0)
{ {
qInfo() << "XSDK_MediaRecordDownload Failed:"<< this->mediaHandle ; qInfo() << "XSDK_MediaRecordDownload Failed:"<< this->mediaHandle ;
return ; return ;
} }
} }
int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) { int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
if (!semaphore.tryAcquire()) { if (!semaphore.tryAcquire()) {
...@@ -344,7 +344,7 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) { ...@@ -344,7 +344,7 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
<< "\r\nChannel:" << alarmInfo.Channel.Value() << "\r\nChannel:" << alarmInfo.Channel.Value()
<< "\r\nStartTime:" << alarmInfo.StartTime.Value() << "\r\nStartTime:" << alarmInfo.StartTime.Value()
<< "\r\nStatus:" << alarmInfo.Status.Value(); << "\r\nStatus:" << alarmInfo.Status.Value();
} }
else else
{ {
...@@ -354,7 +354,7 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) { ...@@ -354,7 +354,7 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance(); MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
qint64 currentTime= QDateTime::currentSecsSinceEpoch(); qint64 currentTime= QDateTime::currentSecsSinceEpoch();
mediaFaceImage->FaceImageCallBack(hObject,this->channel,image); mediaFaceImage->FaceImageCallBack(hObject,this->channel,image);
if (image.empty()) if (image.empty())
{ {
qInfo() << "Failed to read the image"; qInfo() << "Failed to read the image";
...@@ -383,12 +383,12 @@ void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){ ...@@ -383,12 +383,12 @@ void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance(); MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
qint64 currentTime= QDateTime::currentSecsSinceEpoch(); qint64 currentTime= QDateTime::currentSecsSinceEpoch();
int ret=mediaFaceImage->FaceImageCallBack(hDevice,nChannel, image); int ret=mediaFaceImage->FaceImageCallBack(hDevice,nChannel, image);
qDebug() << "SdkDevSnapSyn HTTP POST request to: " << sSn; qDebug() << "SdkDevSnapSyn HTTP POST request to: " << sSn;
if (ret < 0) { if (ret < 0) {
offlineCount++; // 累加计数器 offlineCount++; // 累加计数器
qDebug() << "offlineCount: " << loginParam->sDevId<<offlineCount; qDebug() << "offlineCount: " << loginParam->sDevId<<offlineCount;
if (offlineCount >= 3) { // 判断是否连续3次返回0 if (offlineCount >= 3) { // 判断是否连续3次返回0
qInfo() << "设备离线"; qInfo() << "设备离线";
QString ip=QString::fromUtf8(loginParam->sDevId); QString ip=QString::fromUtf8(loginParam->sDevId);
...@@ -414,10 +414,6 @@ void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){ ...@@ -414,10 +414,6 @@ void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
updateImage(image, currentTime); updateImage(image, currentTime);
} }
void CameraHandle::setTimeoutMs(int timeoutMs){
this->timeoutMs=timeoutMs;
}
void CameraHandle::matToBase64(const cv::Mat &image, QByteArray &base64Data) { void CameraHandle::matToBase64(const cv::Mat &image, QByteArray &base64Data) {
std::vector<unsigned char> buffer; std::vector<unsigned char> buffer;
std::vector<int> params{cv::IMWRITE_JPEG_QUALITY, 90}; std::vector<int> params{cv::IMWRITE_JPEG_QUALITY, 90};
...@@ -458,7 +454,7 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma ...@@ -458,7 +454,7 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
qDebug()<<sSn<<":"<<"出场:"<<2; qDebug()<<sSn<<":"<<"出场:"<<2;
qDebug()<<sSn<<":"<<"老车出场:"<<park->getCurrentPlate().getLicensePlate(); qDebug()<<sSn<<":"<<"老车出场:"<<park->getCurrentPlate().getLicensePlate();
qDebug()<<sSn<<":"<<"老车出场:"<<park->getCurrentPlate().getLicensePlate(); qDebug()<<sSn<<":"<<"老车出场:"<<park->getCurrentPlate().getLicensePlate();
//当前不为空,新车,新车入场,老车出场 //当前不为空,新车,新车入场,老车出场
exitAndMoMap[Exit]=park->getCurrentPlate(); exitAndMoMap[Exit]=park->getCurrentPlate();
exitAndMoMap[Mobilization]=newInfo; exitAndMoMap[Mobilization]=newInfo;
...@@ -472,42 +468,59 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma ...@@ -472,42 +468,59 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
Common & instace= Common::getInstance(); Common & instace= Common::getInstance();
qDebug()<<"=============================>"; qDebug()<<"=============================>";
static int i=0; static int i=0;
printf("updateImage%d次\n", ++i); printf("updateImage%d次\n", ++i);
faceCount.fetch_add(1, std::memory_order_relaxed); faceCount.fetch_add(1, std::memory_order_relaxed);
qDebug()<<"faceCount==>"<<faceCount.load(std::memory_order_relaxed); qDebug()<<"faceCount==>"<<faceCount.load(std::memory_order_relaxed);
int width = frame.cols; // 获取图像宽度 int width = frame.cols; // 获取图像宽度
int height = frame.rows; // 获取图像高度 int height = frame.rows; // 获取图像高度
qDebug()<<"frame 宽度:"<<width<<"frame 高度:"<<height; qDebug()<<"frame 宽度:"<<width<<"frame 高度:"<<height;
FaceReconition &faceRecognition = FaceReconition::getInstance(); FaceReconition &faceRecognition = FaceReconition::getInstance();
HumanDetection &humanDetection=HumanDetection::getInstance(); HumanDetection &humanDetection=HumanDetection::getInstance();
LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance(); LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance();
static int ii=0; static int ii=0;
printf("updateImage retryCount: %d \n", ++ii); printf("updateImage retryCount: %d \n", ++ii);
//faceRecognition.search(frame,imageHandleList,names); //faceRecognition.search(frame,imageHandleList,names);
QByteArray imgs; QByteArray imgs;
int faSize=humanDetection.findHuManCar(frame,0,detector);
this->matToBase64(frame, imgs); this->matToBase64(frame, imgs);
HttpService httpService(httpUrl); HttpService httpService(httpUrl);
if(currentFace!=faSize){ int faSize =-1;
if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){ if ((algorithmPermissions & 0x01<<1) != 0) {
vides_data::response* resp=httpService.httpPostFacePopulation(imgs,faSize,sSn,currentTime); faSize=humanDetection.findHuManCar(frame,0,detector);
if (resp->code!= 0) {
qInfo()<<"人数变化推送信息推送失败"; if(currentFace!=faSize){
if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){
vides_data::response* resp=httpService.httpPostFacePopulation(imgs,faSize,sSn,currentTime);
if (resp->code!= 0) {
qInfo()<<"人数变化推送信息推送失败";
}
instace.deleteObj(resp);
currentFace=faSize;
}
}
}
if ((algorithmPermissions & 0x01<<2) != 0) {
int uniforms=humanDetection.findHuManCar(frame,0x02,detector);
if(uniforms>0 ){
if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){
httpService.setHttpUrl(httpUrl);
vides_data::response* resp=httpService.httpPostUniforms(imgs,faSize,sSn,currentTime);
if (resp->code!= 0) {
qInfo()<<"推送未穿工服人数失败";
}
instace.deleteObj(resp);
} }
instace.deleteObj(resp);
currentFace=faSize;
} }
} }
if(faSize>0){ if(faSize>0 ){
qDebug() << "faceRecognition.doesItExistEmployee Current thread ID: " << QThread::currentThreadId(); qDebug() << "faceRecognition.doesItExistEmployee Current thread ID: " << QThread::currentThreadId();
std::list<vides_data::faceRecognitionResult>faces; std::list<vides_data::faceRecognitionResult>faces;
faceRecognition.doesItExistEmployee(frame,faces); faceRecognition.doesItExistEmployee(frame,faces);
...@@ -524,38 +537,42 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -524,38 +537,42 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
faceReconition.area.bottom_right_corner_y= face.y + face.height; faceReconition.area.bottom_right_corner_y= face.y + face.height;
faceReconition.area.bottom_left_corner_x = face.x; faceReconition.area.bottom_left_corner_x = face.x;
faceReconition.area.bottom_left_corner_y = face.y + face.height; faceReconition.area.bottom_left_corner_y = face.y + face.height;
faceReconition.area.top_right_corner_x = face.x + face.width; faceReconition.area.top_right_corner_x = face.x + face.width;
faceReconition.area.top_right_corner_y = face.y; faceReconition.area.top_right_corner_y = face.y;
httpService.setHttpUrl(httpUrl); httpService.setHttpUrl(httpUrl);
vides_data::response* resp = httpService.httpPostFaceReconition(faceReconition); vides_data::response* resp = httpService.httpPostFaceReconition(faceReconition);
if (resp->code!= 0) { if (resp->code!= 0) {
qInfo()<<"识别人code"<<resp->code; qInfo()<<"识别人code"<<resp->code;
qInfo()<<"识别人msg"<<resp->msg; qInfo()<<"识别人msg"<<resp->msg;
qInfo()<<"识别人脸信息推送失败"<<face.id; qInfo()<<"识别人脸信息推送失败"<<face.id;
} }
instace.deleteObj(resp); instace.deleteObj(resp);
} }
} }
} }
//关闭车牌识别
if ((algorithmPermissions & 0x01) == 0) {
return ;
}
QString lpNumber; QString lpNumber;
vides_data::requestLicensePlate plate; vides_data::requestLicensePlate plate;
plate.sn=sSn; plate.sn=sSn;
// return ; // return ;
if(image_save==1){ if(image_save==1){
QString fileName= instace.getVideoOut().append(instace.getTimeString()+".jpg"); QString fileName= instace.getVideoOut().append(instace.getTimeString()+".jpg");
bool success = cv::imwrite(fileName.toStdString(), frame); bool success = cv::imwrite(fileName.toStdString(), frame);
if (success) { if (success) {
qDebug() << "图片已成功保存至:" << fileName; qDebug() << "图片已成功保存至:" << fileName;
} else { } else {
qDebug() << "图片保存失败!"; qDebug() << "图片保存失败!";
} }
} }
licensePlateRecogn.licensePlateNumber(frame, lpNumber,plate,currentTime,ctx); licensePlateRecogn.licensePlateNumber(frame, lpNumber,plate,currentTime,ctx);
std::map<int,RecognizedInfo>exitMoMap; std::map<int,RecognizedInfo>exitMoMap;
vides_data::requestLicensePlate newPlate; vides_data::requestLicensePlate newPlate;
...@@ -638,7 +655,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -638,7 +655,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
this->checkAndUpdateCurrentPlate(value,frame,recognizedInfo,res,exitMoMap); this->checkAndUpdateCurrentPlate(value,frame,recognizedInfo,res,exitMoMap);
} }
qDebug()<<sSn<<"==>checkAndUpdateCurrentPlate结果是"<<res; qDebug()<<sSn<<"==>checkAndUpdateCurrentPlate结果是"<<res;
if (res == Exit || res == Mobilization) { if (res == Exit || res == Mobilization) {
recognition.areaLocation=value->getArea(); recognition.areaLocation=value->getArea();
recognition.img=imgs; recognition.img=imgs;
...@@ -651,7 +668,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -651,7 +668,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
recognition.img=imgs; recognition.img=imgs;
recognition.new_color=recognizedInfo.getColor(); recognition.new_color=recognizedInfo.getColor();
newPlate.plates.push_back(std::move(recognition)); newPlate.plates.push_back(std::move(recognition));
RecognizedInfo exitInfo=exitMoMap[Exit]; RecognizedInfo exitInfo=exitMoMap[Exit];
vides_data::LicensePlate oldInfo; vides_data::LicensePlate oldInfo;
oldInfo.areaLocation=value->getArea(); oldInfo.areaLocation=value->getArea();
...@@ -670,7 +687,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -670,7 +687,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
value->addQueue(recognizedInfo); value->addQueue(recognizedInfo);
int res; int res;
this->checkAndUpdateCurrentPlate(value, frame, recognizedInfo, res,exitMoMap); this->checkAndUpdateCurrentPlate(value, frame, recognizedInfo, res,exitMoMap);
if (res == Exit || res == Mobilization) { if (res == Exit || res == Mobilization) {
vides_data::LicensePlate current; vides_data::LicensePlate current;
current.areaLocation = value->getArea(); current.areaLocation = value->getArea();
...@@ -688,7 +705,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -688,7 +705,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
current.new_color = recognizedInfo.getColor(); current.new_color = recognizedInfo.getColor();
current.new_plate = recognizedInfo.getLicensePlate(); current.new_plate = recognizedInfo.getLicensePlate();
current.time = recognizedInfo.getRecognizeTime(); current.time = recognizedInfo.getRecognizeTime();
newPlate.plates.push_back(std::move(current)); newPlate.plates.push_back(std::move(current));
RecognizedInfo exitInfo=exitMoMap[Exit]; RecognizedInfo exitInfo=exitMoMap[Exit];
vides_data::LicensePlate oldInfo; vides_data::LicensePlate oldInfo;
...@@ -710,7 +727,23 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){ ...@@ -710,7 +727,23 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
} }
} }
} }
void CameraHandle::findIp(QString &ip){
ip=QString::fromStdString(loginParam->sDevId);
}
void CameraHandle::findFirmwareVersion(QString &firmwareVersion){
char szOutBuffer[1024] = { 0 };
int nLen = sizeof(szOutBuffer);;
int nResult = XSDK_DevGetSysConfigSyn(hDevice, JK_SystemInfo, szOutBuffer, &nLen, 4000, JK_SystemInfo_MsgId);
if (nResult >= 0)
{
XSDK_CFG::SystemInfo cfg;
cfg.Parse(szOutBuffer);
const char* SoftWareVersion = cfg.SoftWareVersion.ToString();
firmwareVersion=QString::fromStdString(SoftWareVersion);
}
}
void CameraHandle::pushRecordToCloud(int id, int recognitionType, QString ossUrl){ void CameraHandle::pushRecordToCloud(int id, int recognitionType, QString ossUrl){
HttpService httpService(ossUrl); HttpService httpService(ossUrl);
Common & instace= Common::getInstance(); Common & instace= Common::getInstance();
...@@ -731,7 +764,7 @@ void CameraHandle::pushRecordToCloud(int id, int recognitionType, QString ossUrl ...@@ -731,7 +764,7 @@ void CameraHandle::pushRecordToCloud(int id, int recognitionType, QString ossUrl
instace.deleteObj(reco); instace.deleteObj(reco);
} }
instace.deleteObj(res); instace.deleteObj(res);
} }
void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlate &location){ void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlate &location){
...@@ -744,7 +777,7 @@ void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlat ...@@ -744,7 +777,7 @@ void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlat
HttpService httpService(httpUrl); HttpService httpService(httpUrl);
std::list<vides_data::responseRecognitionData> result; std::list<vides_data::responseRecognitionData> result;
vides_data::response*resp= httpService.httpLicensePlateRecognition(location,result); vides_data::response*resp= httpService.httpLicensePlateRecognition(location,result);
if (resp->code == 0) { if (resp->code == 0) {
if(result.size()==0){ if(result.size()==0){
return ; return ;
...@@ -764,7 +797,7 @@ void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlat ...@@ -764,7 +797,7 @@ void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlat
// data.recognitionType=var.recognitionType; // data.recognitionType=var.recognitionType;
// data.sn=var.sn; // data.sn=var.sn;
// videoCurrentData[var.id]=data; // videoCurrentData[var.id]=data;
// sdkDownloadFileByTime(this->hDevice,var.id, // sdkDownloadFileByTime(this->hDevice,var.id,
// instace.timestampToDateString(var.inTime),instace.timestampToDateString(var.outTime)); // instace.timestampToDateString(var.inTime),instace.timestampToDateString(var.outTime));
// } // }
...@@ -778,7 +811,7 @@ void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlat ...@@ -778,7 +811,7 @@ void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlat
qInfo()<<"licensePlateRecognitionResults:车牌识别结果失败"; qInfo()<<"licensePlateRecognitionResults:车牌识别结果失败";
// 在达到最大重试次数且仍然没有成功的情况下执行相应的处理逻辑 // 在达到最大重试次数且仍然没有成功的情况下执行相应的处理逻辑
} }
} }
void CameraHandle::sdkDevSystemTimeZoneSyn(QString &time){ void CameraHandle::sdkDevSystemTimeZoneSyn(QString &time){
QByteArray bTime =time.toUtf8(); QByteArray bTime =time.toUtf8();
...@@ -786,7 +819,7 @@ void CameraHandle::sdkDevSystemTimeZoneSyn(QString &time){ ...@@ -786,7 +819,7 @@ void CameraHandle::sdkDevSystemTimeZoneSyn(QString &time){
char outBuffer[512] = { 0 }; char outBuffer[512] = { 0 };
int nInOutBufSize = sizeof(outBuffer); int nInOutBufSize = sizeof(outBuffer);
const char* zoneCfg ="{ \"FirstUserTimeZone\" : \"true\", \"OPTimeSetting\" : \"800\" }"; const char* zoneCfg ="{ \"FirstUserTimeZone\" : \"true\", \"OPTimeSetting\" : \"800\" }";
int res = XSDK_DevSetSysConfigSyn(hDevice, JK_System_TimeZone, zoneCfg, strlen(zoneCfg), outBuffer, &nInOutBufSize, 5000, EXCMD_CONFIG_GET); int res = XSDK_DevSetSysConfigSyn(hDevice, JK_System_TimeZone, zoneCfg, strlen(zoneCfg), outBuffer, &nInOutBufSize, 5000, EXCMD_CONFIG_GET);
if(res<0){ if(res<0){
qInfo() << "FirstUserTimeZone:修改失败"; qInfo() << "FirstUserTimeZone:修改失败";
...@@ -799,7 +832,7 @@ void CameraHandle::sdkDevSystemTimeZoneSyn(QString &time){ ...@@ -799,7 +832,7 @@ void CameraHandle::sdkDevSystemTimeZoneSyn(QString &time){
//录像设置 //录像设置
void CameraHandle::sdkRecordCfg(const char * recordJson){ void CameraHandle::sdkRecordCfg(const char * recordJson){
qDebug()<<recordJson; qDebug()<<recordJson;
char szOutBuffer[512] = { 0 }; char szOutBuffer[512] = { 0 };
int nLen = sizeof(szOutBuffer);; int nLen = sizeof(szOutBuffer);;
...@@ -826,8 +859,22 @@ void CameraHandle::sdkDevSpvMn(const char *spvMn){ ...@@ -826,8 +859,22 @@ void CameraHandle::sdkDevSpvMn(const char *spvMn){
qInfo() << "sdkDevSpvMn 28181->修改失败"<<res; qInfo() << "sdkDevSpvMn 28181->修改失败"<<res;
} }
} }
bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2) { void CameraHandle::deviceReboot(){
int nRet=0;
XSDK_CFG::OPMachine cfg;
cfg.Action.SetValue("Reboot");
const char* pCfg = cfg.ToString();
nRet = XSDK_DevSetSysConfig(hDevice, JK_OPMachine, pCfg, strlen(pCfg), 1, 5000, EXCMD_SYSMANAGER_REQ);
if(nRet<0){
qInfo() << sSn<<"重启相机失败"<<nRet;
return ;
}
QString ip=QString::fromUtf8(loginParam->sDevId);
MainWindow::sp_this->clearOfflineCameraHandle(ip,loginParam->nDevPort);}
bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2) {
QPolygonF realPolygon; QPolygonF realPolygon;
realPolygon << QPointF(poly1.getArea().topLeftCornerX, poly1.getArea().topLeftCornerY) realPolygon << QPointF(poly1.getArea().topLeftCornerX, poly1.getArea().topLeftCornerY)
<< QPointF(poly1.getArea().bottomLeftCornerX, poly1.getArea().bottomLeftCornerY) << QPointF(poly1.getArea().bottomLeftCornerX, poly1.getArea().bottomLeftCornerY)
...@@ -835,8 +882,8 @@ bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo & ...@@ -835,8 +882,8 @@ bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo &
<< QPointF(poly1.getArea().topRightCornerX, poly1.getArea().topRightCornerY); << QPointF(poly1.getArea().topRightCornerX, poly1.getArea().topRightCornerY);
QPainterPath realPath; QPainterPath realPath;
realPath.addPolygon(realPolygon); realPath.addPolygon(realPolygon);
QPolygonF spacePolygon; QPolygonF spacePolygon;
spacePolygon << QPointF(poly2.getArea().topLeftCornerX, poly2.getArea().topLeftCornerY) spacePolygon << QPointF(poly2.getArea().topLeftCornerX, poly2.getArea().topLeftCornerY)
<< QPointF(poly2.getArea().bottomLeftCornerX, poly2.getArea().bottomLeftCornerY) << QPointF(poly2.getArea().bottomLeftCornerX, poly2.getArea().bottomLeftCornerY)
...@@ -844,10 +891,10 @@ bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo & ...@@ -844,10 +891,10 @@ bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo &
<< QPointF(poly2.getArea().topRightCornerX, poly2.getArea().topRightCornerY); << QPointF(poly2.getArea().topRightCornerX, poly2.getArea().topRightCornerY);
QPainterPath spacePath; QPainterPath spacePath;
spacePath.addPolygon(spacePolygon); spacePath.addPolygon(spacePolygon);
// 使用intersected方法获取两个路径的交集 // 使用intersected方法获取两个路径的交集
QPainterPath intersection = realPath.intersected(spacePath); QPainterPath intersection = realPath.intersected(spacePath);
// 如果交集不为空,则两个多边形重叠 // 如果交集不为空,则两个多边形重叠
return !intersection.isEmpty(); return !intersection.isEmpty();
} }
...@@ -855,9 +902,9 @@ bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo & ...@@ -855,9 +902,9 @@ bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo &
double CameraHandle::calculateIntersectionArea(const QPolygonF &polygon1, const QPolygonF &polygon2) { double CameraHandle::calculateIntersectionArea(const QPolygonF &polygon1, const QPolygonF &polygon2) {
QPolygonF intersection = polygon1.intersected(polygon2); QPolygonF intersection = polygon1.intersected(polygon2);
int n = intersection.count(); int n = intersection.count();
if (n < 3) return 0.0; if (n < 3) return 0.0;
// 构建增量式凸包 // 构建增量式凸包
std::vector<QPointF> convexHullPoints; std::vector<QPointF> convexHullPoints;
for (const QPointF& point : intersection) { for (const QPointF& point : intersection) {
...@@ -866,7 +913,7 @@ double CameraHandle::calculateIntersectionArea(const QPolygonF &polygon1, const ...@@ -866,7 +913,7 @@ double CameraHandle::calculateIntersectionArea(const QPolygonF &polygon1, const
} }
convexHullPoints.push_back(point); convexHullPoints.push_back(point);
} }
double area = 0.0; double area = 0.0;
for (size_t i = 0; i < convexHullPoints.size(); ++i) { for (size_t i = 0; i < convexHullPoints.size(); ++i) {
size_t j = (i + 1) % convexHullPoints.size(); size_t j = (i + 1) % convexHullPoints.size();
...@@ -905,7 +952,7 @@ int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea){ ...@@ -905,7 +952,7 @@ int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea){
for (const auto& point : polygonInfoPoints) { for (const auto& point : polygonInfoPoints) {
qDebug() << "(" << point.x << ", " << point.y << ")"; qDebug() << "(" << point.x << ", " << point.y << ")";
} }
std::vector<cv::Point2f> intersection; std::vector<cv::Point2f> intersection;
double intersectionArea = cv::intersectConvexConvex(polygonInfoPoints, currentPolygonPoints, intersection, true); double intersectionArea = cv::intersectConvexConvex(polygonInfoPoints, currentPolygonPoints, intersection, true);
if (intersectionArea>0.0 && intersectionArea > maxIntersectionArea) { if (intersectionArea>0.0 && intersectionArea > maxIntersectionArea) {
...@@ -913,7 +960,7 @@ int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea){ ...@@ -913,7 +960,7 @@ int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea){
areaOfMaxIntersection = info->getSpaceIndex(); areaOfMaxIntersection = info->getSpaceIndex();
} }
} }
return areaOfMaxIntersection; return areaOfMaxIntersection;
} }
...@@ -926,27 +973,33 @@ int CameraHandle::determineArea(ParkingSpaceInfo &prakArea){ ...@@ -926,27 +973,33 @@ int CameraHandle::determineArea(ParkingSpaceInfo &prakArea){
<< QPoint(prakArea.getArea().bottomLeftCornerX,prakArea.getArea().bottomLeftCornerY) << QPoint(prakArea.getArea().bottomLeftCornerX,prakArea.getArea().bottomLeftCornerY)
<< QPoint(prakArea.getArea().bottomRightCornerX, prakArea.getArea().bottomRightCornerY) << QPoint(prakArea.getArea().bottomRightCornerX, prakArea.getArea().bottomRightCornerY)
<< QPoint(prakArea.getArea().topRightCornerX, prakArea.getArea().topRightCornerY); << QPoint(prakArea.getArea().topRightCornerX, prakArea.getArea().topRightCornerY);
for (ParkingSpaceInfo *info : parkingSpaceInfos) { for (ParkingSpaceInfo *info : parkingSpaceInfos) {
QPolygon polygonInfo; // 移动定义到这里,确保每次迭代时重新初始化 QPolygon polygonInfo; // 移动定义到这里,确保每次迭代时重新初始化
polygonInfo << QPoint(info->getArea().topLeftCornerX, info->getArea().topLeftCornerY) polygonInfo << QPoint(info->getArea().topLeftCornerX, info->getArea().topLeftCornerY)
<< QPoint(info->getArea().bottomLeftCornerX, info->getArea().bottomLeftCornerY) << QPoint(info->getArea().bottomLeftCornerX, info->getArea().bottomLeftCornerY)
<< QPoint(info->getArea().bottomRightCornerX, info->getArea().bottomRightCornerY) << QPoint(info->getArea().bottomRightCornerX, info->getArea().bottomRightCornerY)
<< QPoint(info->getArea().topRightCornerX, info->getArea().topRightCornerY); << QPoint(info->getArea().topRightCornerX, info->getArea().topRightCornerY);
if (polygonsOverlap(prakArea, *info)) { if (polygonsOverlap(prakArea, *info)) {
double currentIntersection = calculateIntersectionArea(polygonInfo, currentPolygon); double currentIntersection = calculateIntersectionArea(polygonInfo, currentPolygon);
if (currentIntersection > maxIntersectionArea) { if (currentIntersection > maxIntersectionArea) {
maxIntersectionArea = currentIntersection; maxIntersectionArea = currentIntersection;
areaOfMaxIntersection = info->getSpaceIndex(); areaOfMaxIntersection = info->getSpaceIndex();
} }
} }
} }
return areaOfMaxIntersection; return areaOfMaxIntersection;
} }
/**
 * Store the algorithm-permission bitmask pushed from the server for this camera.
 *
 * @param algorithm bitmask of enabled algorithms (server-defined encoding;
 *                  consumed elsewhere via the algorithmPermissions member).
 */
void CameraHandle::initAlgorithmPermissions(__uint8_t algorithm){
    // Unconditional store: the previous equality guard was redundant for a
    // plain byte (assigning an equal value is observationally identical).
    this->algorithmPermissions = algorithm;
}
void CameraHandle::initParkingSpaceInfo(const std::list<vides_data::responseArea> &areas){ void CameraHandle::initParkingSpaceInfo(const std::list<vides_data::responseArea> &areas){
int index = 1; int index = 1;
for (auto area = areas.begin(); area != areas.end(); ++area) { for (auto area = areas.begin(); area != areas.end(); ++area) {
...@@ -954,15 +1007,15 @@ void CameraHandle::initParkingSpaceInfo(const std::list<vides_data::responseArea ...@@ -954,15 +1007,15 @@ void CameraHandle::initParkingSpaceInfo(const std::list<vides_data::responseArea
vides_data::ParkingArea pArea; vides_data::ParkingArea pArea;
pArea.bottomLeftCornerX = area->bottom_left_corner_x; pArea.bottomLeftCornerX = area->bottom_left_corner_x;
pArea.bottomLeftCornerY = area->bottom_left_corner_y; pArea.bottomLeftCornerY = area->bottom_left_corner_y;
pArea.topLeftCornerX = area->top_left_corner_x; pArea.topLeftCornerX = area->top_left_corner_x;
pArea.topLeftCornerY = area->top_left_corner_y; pArea.topLeftCornerY = area->top_left_corner_y;
pArea.topRightCornerX = area->top_right_corner_x; pArea.topRightCornerX = area->top_right_corner_x;
pArea.topRightCornerY = area->top_right_corner_y; pArea.topRightCornerY = area->top_right_corner_y;
pArea.bottomRightCornerX = area->bottom_right_corner_x; pArea.bottomRightCornerX = area->bottom_right_corner_x;
pArea.bottomRightCornerY = area->bottom_right_corner_y; pArea.bottomRightCornerY = area->bottom_right_corner_y;
info->setArea(pArea); info->setArea(pArea);
if (parkMap.find(index) == parkMap.end()) { if (parkMap.find(index) == parkMap.end()) {
info->setSpaceIndex(index); // Assuming this method sets the space index info->setSpaceIndex(index); // Assuming this method sets the space index
...@@ -985,10 +1038,10 @@ bool CameraHandle::compareLists(const std::list<vides_data::responseArea>& newAr ...@@ -985,10 +1038,10 @@ bool CameraHandle::compareLists(const std::list<vides_data::responseArea>& newAr
if (newAreas.size() != areas.size()) { if (newAreas.size() != areas.size()) {
return false; return false;
} }
auto itResponse = newAreas.begin(); auto itResponse = newAreas.begin();
auto itParking = areas.begin(); auto itParking = areas.begin();
// 逐个比较 responseArea 和 ParkingArea 对象是否相同 // 逐个比较 responseArea 和 ParkingArea 对象是否相同
while (itResponse != newAreas.end() && itParking != areas.end()) { while (itResponse != newAreas.end() && itParking != areas.end()) {
if (itResponse->bottom_right_corner_x != itParking->bottomRightCornerX || if (itResponse->bottom_right_corner_x != itParking->bottomRightCornerX ||
...@@ -1001,11 +1054,11 @@ bool CameraHandle::compareLists(const std::list<vides_data::responseArea>& newAr ...@@ -1001,11 +1054,11 @@ bool CameraHandle::compareLists(const std::list<vides_data::responseArea>& newAr
itResponse->top_right_corner_y != itParking->topRightCornerY) { itResponse->top_right_corner_y != itParking->topRightCornerY) {
return false; // 如果任意一个元素不匹配,则返回 false return false; // 如果任意一个元素不匹配,则返回 false
} }
++itResponse; ++itResponse;
++itParking; ++itParking;
} }
return true; return true;
} }
void CameraHandle::updateParkMapAndParkingSpaceInfos(const std::list<vides_data::responseArea>&newAreas){ void CameraHandle::updateParkMapAndParkingSpaceInfos(const std::list<vides_data::responseArea>&newAreas){
......
...@@ -11,6 +11,8 @@ ...@@ -11,6 +11,8 @@
#include "Json_Header/System_TimeZone.h" #include "Json_Header/System_TimeZone.h"
#include "Json_Header/RecordCfg.h" #include "Json_Header/RecordCfg.h"
#include "Json_Header/NetWork_SPVMN.h" #include "Json_Header/NetWork_SPVMN.h"
#include "Json_Header/SystemInfo.h"
#include "Json_Header/OPMachine.h"
#include "mainwindow.h" #include "mainwindow.h"
#include "ParkingSpaceInfo.h" #include "ParkingSpaceInfo.h"
#include "so_human_sdk.h" #include "so_human_sdk.h"
...@@ -45,25 +47,25 @@ public: ...@@ -45,25 +47,25 @@ public:
int sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout); int sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
//int SdkMediaGetFaceImage(int hDevice, int nSeq, int nTimeout); //int SdkMediaGetFaceImage(int hDevice, int nSeq, int nTimeout);
int sdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener); int sdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener);
int getHdevice(); int getHdevice();
int getChannel(); int getChannel();
void clearCameraHandle(); void clearCameraHandle();
// void rebindTimer(int hDevice); // void rebindTimer(int hDevice);
void initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 face_frequency); void initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 face_frequency);
void updateImage(const cv::Mat & frame,qint64 currentTime); void updateImage(const cv::Mat & frame,qint64 currentTime);
void matToBase64(const cv::Mat &image, QByteArray &base64Data); void matToBase64(const cv::Mat &image, QByteArray &base64Data);
int callbackFunction(XSDK_HANDLE hObject,QString &szString); int callbackFunction(XSDK_HANDLE hObject,QString &szString);
void checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat & frame,RecognizedInfo& newInfo,int &result,std::map<int,RecognizedInfo>&exitAndMoMap); void checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat & frame,RecognizedInfo& newInfo,int &result,std::map<int,RecognizedInfo>&exitAndMoMap);
void licensePlateRecognitionResults(vides_data::requestLicensePlate &location); void licensePlateRecognitionResults(vides_data::requestLicensePlate &location);
void sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel); void sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel);
//时间设置 //时间设置
void sdkDevSystemTimeZoneSyn(QString &time); void sdkDevSystemTimeZoneSyn(QString &time);
...@@ -73,90 +75,96 @@ public: ...@@ -73,90 +75,96 @@ public:
void sdkEncodeCfg(const char *enCode); void sdkEncodeCfg(const char *enCode);
//28181更新 //28181更新
void sdkDevSpvMn(const char* spvMn); void sdkDevSpvMn(const char* spvMn);
//重启设备
void deviceReboot();
//获取固件版本
void findFirmwareVersion(QString &firmwareVersion);
//获取ip
void findIp(QString &ip);
void sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id, void sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
QString startTimer,QString endTime); QString startTimer,QString endTime);
void setTimeoutMs(int timeoutMs);
QString getSSn(); QString getSSn();
int getMediaHandle(); int getMediaHandle();
void setMediaHandle(int mediaHandle); void setMediaHandle(int mediaHandle);
void setCurrentFace(int currentFace); void setCurrentFace(int currentFace);
void initAlgorithmPermissions(__uint8_t algorithm);
void initParkingSpaceInfo(const std::list<vides_data::responseArea>&areas); void initParkingSpaceInfo(const std::list<vides_data::responseArea>&areas);
bool compareLists(const std::list<vides_data::responseArea>& newAreas); bool compareLists(const std::list<vides_data::responseArea>& newAreas);
void updateParkMapAndParkingSpaceInfos(const std::list<vides_data::responseArea>&newAreas); void updateParkMapAndParkingSpaceInfos(const std::list<vides_data::responseArea>&newAreas);
std::map<int, vides_data::responseRecognitionData>&getVideoCurrentData(); std::map<int, vides_data::responseRecognitionData>&getVideoCurrentData();
std::map<QString, QString>&getCurrentData(); std::map<QString, QString>&getCurrentData();
// 检查点是否在多边形内 // 检查点是否在多边形内
bool polygonsOverlap(ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2); bool polygonsOverlap(ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2);
// 计算两个多边形的交集面积 // 计算两个多边形的交集面积
double calculateIntersectionArea(const QPolygonF &polygon1, const QPolygonF &polygon2); double calculateIntersectionArea(const QPolygonF &polygon1, const QPolygonF &polygon2);
double ccw(const QPointF& a, const QPointF& b, const QPointF& c); double ccw(const QPointF& a, const QPointF& b, const QPointF& c);
void getCurrentFrame(std::vector<uchar> &buffer); void getCurrentFrame(std::vector<uchar> &buffer);
int findPointRegion(ParkingSpaceInfo &prakArea); int findPointRegion(ParkingSpaceInfo &prakArea);
int determineArea(ParkingSpaceInfo &prakArea); int determineArea(ParkingSpaceInfo &prakArea);
signals: signals:
void callbackFrameReady(const cv::Mat &frame, const QString &url); void callbackFrameReady(const cv::Mat &frame, const QString &url);
void afterDownloadFile( int id,int recognitionType,QString ossUrl); void afterDownloadFile( int id,int recognitionType,QString ossUrl);
private slots: private slots:
void sdkRealTimeDevSnapSyn(int hDevice); void sdkRealTimeDevSnapSyn(int hDevice);
void pushRecordToCloud(int id,int recognitionType,QString ossUrl); void pushRecordToCloud(int id,int recognitionType,QString ossUrl);
//void releaseSemaphore(); //void releaseSemaphore();
private : private :
int hDevice; int hDevice;
int channel; int channel;
QString httpUrl; QString httpUrl;
SXSDKLoginParam *loginParam; SXSDKLoginParam *loginParam;
SXMediaFaceImageReq *sxMediaFaceImageReq; SXMediaFaceImageReq *sxMediaFaceImageReq;
std::mutex plateMutex; std::mutex plateMutex;
std::mutex faceMutex; std::mutex faceMutex;
QString sSn; QString sSn;
QString url; QString url;
std::map<int, vides_data::responseRecognitionData> videoCurrentData; std::map<int, vides_data::responseRecognitionData> videoCurrentData;
std::map<QString, QString> currentData; std::map<QString, QString> currentData;
//每个区域编号对应一个区域信息 //每个区域编号对应一个区域信息
std::map<int,ParkingSpaceInfo*>parkMap; std::map<int,ParkingSpaceInfo*>parkMap;
//当前相机监视所以车位区域 //当前相机监视所以车位区域
std::vector<ParkingSpaceInfo*>parkingSpaceInfos; std::vector<ParkingSpaceInfo*>parkingSpaceInfos;
//当前人脸数 //当前人脸数
int currentFace; int currentFace;
int mediaHandle; int mediaHandle;
//2秒钟抓一次图 //2秒钟抓一次图
QTimer *dev_snap_syn_timer; QTimer *dev_snap_syn_timer;
int offlineCount=0; int offlineCount=0;
TCV_HumanDetector *detector; TCV_HumanDetector *detector;
P_HLPR_Context ctx ; P_HLPR_Context ctx ;
QSemaphore semaphore; QSemaphore semaphore;
int timeoutMs;
int image_save; int image_save;
std::atomic<uint64> faceCount; std::atomic<uint64> faceCount;
uint64 face_frequency; uint64 face_frequency;
__uint8_t algorithmPermissions;
}; };
#endif // CAMERAHANDLE_H #endif // CAMERAHANDLE_H
#ifndef FACERECOGNITION_H #ifndef FACERECOGNITION_H
#define FACERECOGNITION_H #define FACERECOGNITION_H
#include "hyperface.h" #include "hyperface.h"
#include <opencv2/opencv.hpp>
#include<QCoreApplication>
#include "herror.h" #include "herror.h"
#include "LogHandle.h" #include "LogHandle.h"
#include "VidesData.h" #include "VidesData.h"
#include <QReadWriteLock> #include <opencv2/opencv.hpp>
#include<QCoreApplication>
class FaceReconition class FaceReconition
{ {
...@@ -16,10 +17,8 @@ private: ...@@ -16,10 +17,8 @@ private:
HContextHandle ctxHandle=nullptr; HContextHandle ctxHandle=nullptr;
float configConfidence; float configConfidence;
std::vector<int32_t>customIds;
QReadWriteLock rwLock; std::vector<int32_t>customIds;
FaceReconition(); FaceReconition();
~FaceReconition(); ~FaceReconition();
...@@ -32,11 +31,12 @@ public: ...@@ -32,11 +31,12 @@ public:
return instance; return instance;
} }
cv::Mat loadImage(const QString &path); cv::Mat loadImage(const QString &path);
cv::Mat loadImageFromByteStream(const QString& filePath);
void doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face); void doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face);
void initSourceImageMap(std::map<QString,QString>&maps,float confidence); void initSourceImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence);
int featureRemove(); int featureRemove();
}; };
......
...@@ -2,6 +2,11 @@ ...@@ -2,6 +2,11 @@
#include <QImage> #include <QImage>
#include <QThread> #include <QThread>
#define cimg_display 0
#include "CImg.h"
using namespace cimg_library;
FaceReconition::FaceReconition() {} FaceReconition::FaceReconition() {}
...@@ -15,33 +20,44 @@ FaceReconition::~FaceReconition(){ ...@@ -15,33 +20,44 @@ FaceReconition::~FaceReconition(){
FaceReconition* FaceReconition::instance = nullptr; FaceReconition* FaceReconition::instance = nullptr;
//cv::Mat FaceReconition::loadImage(const QString &path) {
// // 尝试使用OpenCV直接加载图像
// std::string stdPath = path.toStdString(); // 将路径转换为std::string
// cv::Mat image = cv::imread(stdPath, cv::IMREAD_COLOR); // 尝试加载图像
// if (!image.empty()) {
// qDebug() << "图像以OpenCV成功加载。";
// return image;
// }
// // 使用OpenCV加载失败,尝试使用QImage
// qDebug() << "使用OpenCV加载图像失败,尝试QImage转换。";
// QImage qimg(path);
// if (qimg.isNull()) {
// qDebug() << "QImage也无法加载图像,检查文件路径或文件损坏。";
// return cv::Mat(); // 返回空的cv::Mat对象
// }
// // 转换QImage格式为RGB888
// QImage converted = qimg.convertToFormat(QImage::Format_RGB888);
// cv::Mat mat(converted.height(), converted.width(), CV_8UC3, const_cast<uchar*>(converted.bits()), static_cast<size_t>(converted.bytesPerLine()));
// // 不进行颜色转换,直接返回
// return mat;
//}
cv::Mat FaceReconition::loadImage(const QString &path) { cv::Mat FaceReconition::loadImage(const QString &path) {
// 尝试使用OpenCV直接加载图像 // 尝试使用OpenCV直接加载图像
std::string stdPath = path.toStdString(); // 将路径转换为std::string std::string stdPath = path.toStdString();
cv::Mat image = cv::imread(stdPath, cv::IMREAD_COLOR); // 尝试加载图像 cv::Mat image = cv::imread(stdPath, cv::IMREAD_COLOR);
if (!image.empty()) { if (!image.empty()) {
qDebug() << "图像以OpenCV成功加载。"; qDebug() << "图像以OpenCV成功加载。";
return image; return image;
} }
// 使用OpenCV加载失败,尝试使用QImage return loadImageFromByteStream(path);
qDebug() << "使用OpenCV加载图像失败,尝试QImage转换。";
QImage qimg(path);
if (qimg.isNull()) {
qDebug() << "QImage也无法加载图像,检查文件路径或文件损坏。";
return cv::Mat(); // 返回空的cv::Mat对象
}
// 转换QImage格式为RGB888
QImage converted = qimg.convertToFormat(QImage::Format_RGB888);
cv::Mat mat(converted.height(), converted.width(), CV_8UC3, const_cast<uchar*>(converted.bits()), converted.bytesPerLine());
// 将RGB转换为BGR,以便OpenCV处理
cv::cvtColor(mat, mat, cv::COLOR_RGB2BGR);
return mat;
} }
void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,float confidence){ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence){
//QWriteLocker locker(&rwLock); //QWriteLocker locker(&rwLock);
featureRemove(); featureRemove();
HResult ret; HResult ret;
...@@ -62,7 +78,7 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,float con ...@@ -62,7 +78,7 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,float con
HF_DetectMode detMode = HF_DETECT_MODE_IMAGE; // 选择图像模式 即总是检测 HF_DetectMode detMode = HF_DETECT_MODE_IMAGE; // 选择图像模式 即总是检测
if(ctxHandle==nullptr){ if(ctxHandle==nullptr){
// 创建ctx // 创建ctx
ret = HF_CreateFaceContextFromResourceFileOptional(path, option, detMode, 5, &ctxHandle); ret = HF_CreateFaceContextFromResourceFileOptional(path, option, detMode, numberFaces, &ctxHandle);
if (ret != HSUCCEED) { if (ret != HSUCCEED) {
qInfo() << QString("Create ctx error: %1").arg(ret); qInfo() << QString("Create ctx error: %1").arg(ret);
return; return;
...@@ -149,6 +165,45 @@ int FaceReconition::featureRemove(){ ...@@ -149,6 +165,45 @@ int FaceReconition::featureRemove(){
} }
} }
} }
/**
 * Fallback image loader: decode the file with CImg (used when cv::imread
 * fails, see loadImage) and convert the planar CImg buffer into an
 * interleaved OpenCV Mat.
 *
 * @param filePath path to the image file (UTF-8 encodable).
 * @return grayscale CV_8UC1 or BGR CV_8UC3 cv::Mat; empty Mat on decode error.
 */
cv::Mat FaceReconition::loadImageFromByteStream(const QString& filePath) {
    try {
        // CImg opens the file itself and needs a UTF-8 C string path.
        QByteArray bPath = filePath.toUtf8();
        const char* ctr = bPath.data();
        CImg<unsigned char> cimg_image(ctr);

        int width = cimg_image.width();
        int height = cimg_image.height();
        int channels = cimg_image.spectrum(); // number of color planes

        // Destination Mat with a matching pixel layout.
        cv::Mat opencv_image(height, width, channels == 1 ? CV_8UC1 : CV_8UC3);

        // CImg stores each channel as a separate plane; gather per pixel and
        // swap RGB -> BGR because OpenCV expects BGR channel order.
        cimg_forXY(cimg_image, x, y) {
            if (channels == 1) {
                opencv_image.at<unsigned char>(y, x) = cimg_image(x, y, 0, 0);
            } else {
                cv::Vec3b& opencv_pixel = opencv_image.at<cv::Vec3b>(y, x);
                opencv_pixel[2] = cimg_image(x, y, 0, 0); // Red
                opencv_pixel[1] = cimg_image(x, y, 0, 1); // Green
                opencv_pixel[0] = cimg_image(x, y, 0, 2); // Blue
            }
        }
        return opencv_image;
    } catch (const CImgException& e) {
        qDebug() << "CImg Error: " << e.what();
        return cv::Mat();
    } catch (const cv::Exception& e) {
        qDebug() << "OpenCV Error: " << e.what();
        return cv::Mat();
    }
    // Note: the original had an unreachable trailing `return cv::Mat();` and
    // stray double semicolons in the catch blocks; both removed.
}
void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
//QReadLocker locker(&rwLock); //QReadLocker locker(&rwLock);
HResult ret; HResult ret;
...@@ -207,34 +262,6 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d ...@@ -207,34 +262,6 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
qDebug()<<QString("匹配到的tag: %1").arg(searchIdentity.tag); qDebug()<<QString("匹配到的tag: %1").arg(searchIdentity.tag);
qDebug()<<QString("匹配到的customId: %1").arg(searchIdentity.customId); qDebug()<<QString("匹配到的customId: %1").arg(searchIdentity.customId);
// Face Pipeline // Face Pipeline
ret = HF_MultipleFacePipelineProcess(ctxHandle, imageSteamHandle, &multipleFaceData, parameter);
if (ret != HSUCCEED) {
//printf("pipeline执行失败: %ld", ret);
qInfo()<<QString("pipeline执行失败: %1").arg(ret);
return ;
}
HF_RGBLivenessConfidence livenessConfidence = {0};
ret = HF_GetRGBLivenessConfidence(ctxHandle, &livenessConfidence);
if (ret != HSUCCEED) {
qInfo()<<QString("获取活体数据失败1");
return ;
}
//printf("活体置信度: %f", livenessConfidence.confidence[0]);
qDebug()<<QString("活体置信度====>:%1").arg(livenessConfidence.confidence[0],0,'Q',4);
HF_FaceMaskConfidence maskConfidence = {0};
ret = HF_GetFaceMaskConfidence(ctxHandle, &maskConfidence);
if (ret != HSUCCEED) {
qInfo()<<QString("获口罩数据失败");
return ;
}
HInt32 faceNum;
ret = HF_FeatureGroupGetCount(ctxHandle, &faceNum);
if (ret != HSUCCEED) {
// printf("获取失败");
qInfo()<<QString("获取失败");
return ;
}
//printf("人脸特征数量: %d", faceNum); //printf("人脸特征数量: %d", faceNum);
if (confidence > configConfidence) { if (confidence > configConfidence) {
vides_data::faceRecognitionResult newface; vides_data::faceRecognitionResult newface;
......
...@@ -14,36 +14,52 @@ HttpService::~HttpService() { ...@@ -14,36 +14,52 @@ HttpService::~HttpService() {
vides_data::response* HttpService::httpPostDeviceStatus(vides_data::requestDeviceStatus & deviceStatus) { vides_data::response* HttpService::httpPostDeviceStatus(vides_data::requestDeviceStatus & deviceStatus) {
httpUrl.append("/api/v1.0/device/ping"); httpUrl.append("/api/v1.0/device/ping");
// 创建主 JSON 对象
QJsonObject json; QJsonObject json;
json.insert("sn",deviceStatus.sSn); json.insert("sn", deviceStatus.sSn);
json.insert("type",deviceStatus.type); json.insert("type", deviceStatus.type);
json.insert("state",deviceStatus.status); json.insert("state", deviceStatus.status);
json.insert("ip_addr",deviceStatus.ip_addr); json.insert("ip_addr", deviceStatus.ip_addr);
QJsonDocument jsonDoc; json.insert("firmware_version", deviceStatus.firmware_version); // 将固件版本添加到主 JSON 对象中
jsonDoc.setObject(json);
QByteArray bytearr= jsonDoc.toJson(QJsonDocument::Compact); // 创建摄像头信息列表 JSON 数组
QJsonArray cameraArray;
vides_data::response *resp=new vides_data::response(); for (const auto& cameraInfo : deviceStatus.camera_info_list) {
QJsonObject cameraObject;
cameraObject.insert("sn", cameraInfo.sSn);
cameraObject.insert("ip_addr", cameraInfo.ip_addr);
cameraObject.insert("firmware_version", cameraInfo.firmware_version);
cameraArray.append(cameraObject);
}
// 将摄像头信息列表添加到主 JSON 对象中
json.insert("camera_info_list", cameraArray);
// 将 JSON 对象转换为 JSON 文档
QJsonDocument jsonDoc(json);
QByteArray bytearr = jsonDoc.toJson(QJsonDocument::Compact);
vides_data::response *resp = new vides_data::response();
QNetworkRequest request; QNetworkRequest request;
request.setUrl(QUrl(httpUrl)); request.setUrl(QUrl(httpUrl));
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE); request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex); QMutexLocker locker(&m_httpClientMutex);
if(m_httpClient.post(request,bytearr)){ if (m_httpClient.post(request, bytearr)) {
QByteArray && byte=m_httpClient.text().toUtf8(); QByteArray && byte = m_httpClient.text().toUtf8();
QJsonDocument docujson= QJsonDocument::fromJson(byte.data()); QJsonDocument docujson = QJsonDocument::fromJson(byte.data());
QJsonObject maps= docujson.object(); QJsonObject maps = docujson.object();
QVariantMap map =std::move(maps.toVariantMap()); QVariantMap map = std::move(maps.toVariantMap());
resp->code=map["code"].toInt(); resp->code = map["code"].toInt();
resp->msg=map["message"].toString(); resp->msg = map["message"].toString();
}else{ } else {
qDebug()<<"httpPostDeviceStatus"<<m_httpClient.errorCode(); qDebug() << "httpPostDeviceStatus" << m_httpClient.errorCode();
resp->code=2; resp->code = 2;
resp->msg=OPERATION_FAILED; resp->msg = m_httpClient.errorString();
} }
return resp; return resp;
} }
vides_data::response* HttpService::httpPostRecord(int id,int recongnition_type,QString sn,QString videw_addr){ vides_data::response* HttpService::httpPostRecord(int id,int recongnition_type,QString sn,QString videw_addr){
httpUrl.append("/api/v1.0/recongnition/record"); httpUrl.append("/api/v1.0/recongnition/record");
...@@ -108,7 +124,8 @@ vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_d ...@@ -108,7 +124,8 @@ vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_d
HttpService::stsCredentials.endpoint=responseData.sts_credentials.endpoint = stsCredentialsObj["endpoint"].toString(); HttpService::stsCredentials.endpoint=responseData.sts_credentials.endpoint = stsCredentialsObj["endpoint"].toString();
HttpService::stsCredentials.expiration=responseData.sts_credentials.expiration = stsCredentialsObj["expiration"].toString(); HttpService::stsCredentials.expiration=responseData.sts_credentials.expiration = stsCredentialsObj["expiration"].toString();
HttpService::stsCredentials.security_token=responseData.sts_credentials.security_token = stsCredentialsObj["security_token"].toString(); HttpService::stsCredentials.security_token=responseData.sts_credentials.security_token = stsCredentialsObj["security_token"].toString();
int algorithm= dataObj["algorithm"].toInt();
responseData.algorithm=algorithm;
QJsonArray dataArray = dataObj["list"].toArray(); QJsonArray dataArray = dataObj["list"].toArray();
for (const QJsonValue& value : dataArray) { for (const QJsonValue& value : dataArray) {
vides_data::responseDeviceStatus status; vides_data::responseDeviceStatus status;
...@@ -116,6 +133,7 @@ vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_d ...@@ -116,6 +133,7 @@ vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_d
status.sSn = deviceObject["sn"].toString(); status.sSn = deviceObject["sn"].toString();
status.type = static_cast<int8_t>(deviceObject["type"].toInt()); status.type = static_cast<int8_t>(deviceObject["type"].toInt());
status.is_reboot=deviceObject["is_reboot"].toBool();
status.merchant_id = static_cast<int8_t>(deviceObject["merchant_id"].toInt()); status.merchant_id = static_cast<int8_t>(deviceObject["merchant_id"].toInt());
// 处理"areas"数组 // 处理"areas"数组
...@@ -268,6 +286,39 @@ vides_data::response* HttpService::httpFindFaceReconition(QString &serialNumber, ...@@ -268,6 +286,39 @@ vides_data::response* HttpService::httpFindFaceReconition(QString &serialNumber,
} }
return resp; return resp;
} }
/**
 * Push a uniform (work-clothes) detection snapshot to the cloud endpoint
 * /api/v1.0/recongnition/uniform.
 *
 * @param img    image payload (serialized via QJsonValue::fromVariant,
 *               matching the sibling httpPostFacePopulation call).
 * @param number count of detected people without uniforms.
 * @param sn     device serial number.
 * @param time   capture timestamp.
 * @return heap-allocated response; caller owns and must delete it.
 *         code/msg reflect either the server reply or the HTTP failure.
 */
vides_data::response *HttpService::httpPostUniforms(QByteArray &img,int &number,QString sn,qint64 time){
    httpUrl.append("/api/v1.0/recongnition/uniform");

    // Build the request payload.
    QJsonObject payload;
    payload.insert("img", QJsonValue::fromVariant(img));
    payload.insert("sn", sn);
    payload.insert("number", number);
    payload.insert("time", QJsonValue::fromVariant(time));
    QJsonDocument payloadDoc;
    payloadDoc.setObject(payload);
    QByteArray body = payloadDoc.toJson(QJsonDocument::Compact);

    vides_data::response *resp = new vides_data::response();
    QNetworkRequest request;
    request.setUrl(QUrl(httpUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);

    // The shared HTTP client is not re-entrant; serialize access.
    QMutexLocker locker(&m_httpClientMutex);
    if (!m_httpClient.post(request, body)) {
        // Transport-level failure: report the generic failure message.
        qDebug() << m_httpClient.errorCode();
        qDebug() << "httpPostUniforms" << m_httpClient.errorString();
        resp->code = 2;
        resp->msg = OPERATION_FAILED;
        return resp;
    }

    // Parse {"code": ..., "message": ...} out of the server reply.
    QByteArray raw = m_httpClient.text().toUtf8();
    QJsonDocument replyDoc = QJsonDocument::fromJson(raw.data());
    QVariantMap replyMap = replyDoc.object().toVariantMap();
    resp->code = replyMap["code"].toInt();
    resp->msg = replyMap["message"].toString();
    return resp;
}
vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &number,QString sn,qint64 time){ vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &number,QString sn,qint64 time){
httpUrl.append("/api/v1.0/recongnition/population"); httpUrl.append("/api/v1.0/recongnition/population");
QJsonObject json; QJsonObject json;
......
...@@ -35,6 +35,9 @@ public: ...@@ -35,6 +35,9 @@ public:
vides_data::response *httpPostFaceReconition(vides_data::requestFaceReconition & faceReconition); vides_data::response *httpPostFaceReconition(vides_data::requestFaceReconition & faceReconition);
//人数变化推送 //人数变化推送
vides_data::response *httpPostFacePopulation(QByteArray &img,int &number,QString sn,qint64 time); vides_data::response *httpPostFacePopulation(QByteArray &img,int &number,QString sn,qint64 time);
//工服推送
vides_data::response *httpPostUniforms(QByteArray &img,int &number,QString sn,qint64 time);
//客户端组列表 //客户端组列表
vides_data::response *httpFindStream(QString &serialNumber); vides_data::response *httpFindStream(QString &serialNumber);
......
...@@ -15,10 +15,34 @@ int HumanDetection::findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector ...@@ -15,10 +15,34 @@ int HumanDetection::findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector
// 执行一帧目标检测 // 执行一帧目标检测
TCV_HumanDetectorProcessFrame(detector, stream); TCV_HumanDetectorProcessFrame(detector, stream);
int num = (res == 0) ? TCV_HumanDetectorGetNumOfHuman(detector) :TCV_HumanDetectorGetNumOfCar(detector); int num=-1;
qDebug() << (res == 0 ? "Number of people detected:" : "Number of cars detected:") << num; if(res==0x00 || res==0x02){
TCV_ReleaseCameraStream(stream); num= TCV_HumanDetectorGetNumOfHuman(detector);
if (num > 0 && res==0x02) {
// 创建一个接收检测结果的对象数组
TCV_ObjectLocation result[num];
// 提取行人检测结果
TCV_HumanDetectorGetHumanLocation(detector, result, num);
int num_uniforms = 0;
//工服
for (int i = 0; i < num; ++i) {
if (result[i].uniform == 0) {
++num_uniforms;
}
}
num=num_uniforms;
}
qDebug() << (res == 0 ? "Number of people detected:" : "Number of people with uniform == 0 detected:") << num;
}else if (res==0x01) {
num=TCV_HumanDetectorGetNumOfCar(detector);
qDebug() << "Number of cars detected:" << num;
}else {
qDebug() << "参数错误";
}
TCV_ReleaseCameraStream(stream);
return num; return num;
} }
...@@ -21,6 +21,7 @@ void ParkingSpaceInfo::removeQueue(){ ...@@ -21,6 +21,7 @@ void ParkingSpaceInfo::removeQueue(){
queuels.dequeue(); queuels.dequeue();
} }
} }
void ParkingSpaceInfo::removeNoQueue() { void ParkingSpaceInfo::removeNoQueue() {
QMutexLocker locker(&queueMutex); QMutexLocker locker(&queueMutex);
if (!queuels.isEmpty() && queuels.size() > 3) { if (!queuels.isEmpty() && queuels.size() > 3) {
......
...@@ -23,14 +23,27 @@ struct response ...@@ -23,14 +23,27 @@ struct response
QString msg; QString msg;
response() {} response() {}
}; };
struct requestCameraInfo{
QString ip_addr;
QString firmware_version;
QString sSn;
requestCameraInfo() {}
};
struct requestDeviceStatus struct requestDeviceStatus
{ {
QString sSn; QString sSn;
int8_t type; int8_t type;
int8_t status; int8_t status;
QString ip_addr; QString ip_addr;
QString firmware_version;
std::list<requestCameraInfo>camera_info_list;
requestDeviceStatus() {} requestDeviceStatus() {}
}; };
struct responseStsCredentials{ struct responseStsCredentials{
QString access_key_id; QString access_key_id;
QString access_key_secret; QString access_key_secret;
...@@ -67,10 +80,12 @@ struct responseDeviceStatus ...@@ -67,10 +80,12 @@ struct responseDeviceStatus
QString sSn; QString sSn;
int8_t type; int8_t type;
int8_t merchant_id; int8_t merchant_id;
bool is_reboot;
std::list<responseArea>areas; std::list<responseArea>areas;
responseDeviceStatus() {} responseDeviceStatus() : is_reboot(false) {}
}; };
struct responseDeviceData{ struct responseDeviceData{
int algorithm ;
std::list<responseDeviceStatus> list; std::list<responseDeviceStatus> list;
responseStsCredentials sts_credentials; responseStsCredentials sts_credentials;
}; };
...@@ -257,19 +272,22 @@ inline bool pingAddress(const QString &address) { ...@@ -257,19 +272,22 @@ inline bool pingAddress(const QString &address) {
return output.contains("1 packets transmitted, 1 received"); return output.contains("1 packets transmitted, 1 received");
} }
inline QString getSerialNumber() {
inline QString getCpuSerialNumber() {
QProcess process; QProcess process;
// 使用管道将两个命令的执行结果串联起来,直接查找包含"Serial"的行 // 启动一个进程运行shell命令
process.start("bash", QStringList() << "-c" << "cat /proc/cpuinfo | grep Serial"); process.start("sh", QStringList() << "-c" << "cat /proc/cpuinfo | grep Serial");
process.waitForFinished(-1); // 等待命令执行完成 // 等待命令执行完成
process.waitForFinished();
// 读取命令的标准输出
QString output = process.readAllStandardOutput(); QString output = process.readAllStandardOutput();
QString serialNumber; QString serialNumber;
if (!output.isEmpty()) { if (!output.isEmpty()) {
// 已经确保了输出仅包含 Serial 行,所以直接分割并提取 // 已经确保了输出仅包含 Serial 行,所以直接分割并提取
serialNumber = output.split(":").at(1).trimmed(); serialNumber = output.split(":").last().trimmed();
} }
return serialNumber; return serialNumber;
} }
} }
......
...@@ -3,7 +3,7 @@ QT += core gui network multimedia sql concurrent ...@@ -3,7 +3,7 @@ QT += core gui network multimedia sql concurrent
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
CONFIG += c++11 CONFIG += c++11
TARGET = GAMERAVIDEO TARGET = cameravideo
TEMPLATE = app TEMPLATE = app
# The following define makes your compiler emit warnings if you use # The following define makes your compiler emit warnings if you use
...@@ -11,45 +11,46 @@ TEMPLATE = app ...@@ -11,45 +11,46 @@ TEMPLATE = app
# depend on your compiler). Please consult the documentation of the # depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it. # deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS DEFINES += QT_DEPRECATED_WARNINGS
DEFINES += APP_VERSION=\\\"1.0.0\\\"
QMAKE_LIBDIR += /usr/local/lib #QMAKE_LIBDIR += /usr/local/lib
INCLUDEPATH+=/usr/local/include/opencv4 #INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface #INCLUDEPATH+=/usr/local/include/hyperface
INCLUDEPATH+=/usr/local/include/hyper #INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK #INCLUDEPATH+=/usr/local/include/XNetSDK
INCLUDEPATH+=/usr/local/include/human #INCLUDEPATH+=/usr/local/include/human
#INCLUDEPATH+=/usr/local/include/CImg
unix:contains(QMAKE_HOST.arch, x86_64) {
#unix:contains(QMAKE_HOST.arch, x86_64) { QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
# QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib }
#} unix:contains(QMAKE_HOST.arch, arm) {
#unix:contains(QMAKE_HOST.arch, arm) { QMAKE_LIBDIR += /usr/local/lib
# QMAKE_LIBDIR += /usr/local/lib }
#}
# 根据编译器类型选择库路径和头文件路径
## 根据编译器类型选择库路径和头文件路径 unix: {
#unix: { # x86 架构
# # x86 架构 contains(QMAKE_HOST.arch, x86_64) {
# contains(QMAKE_HOST.arch, x86_64) { INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4 INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human INCLUDEPATH+=/home/mark/Public/x86_opencv/include/CImg
# } }
# # ARM 架构 # ARM 架构
# contains(QMAKE_HOST.arch, arm) { contains(QMAKE_HOST.arch, arm) {
# INCLUDEPATH+=/usr/local/include/opencv4 INCLUDEPATH+=/usr/local/include/opencv4
# INCLUDEPATH+=/usr/local/include/hyperface INCLUDEPATH+=/usr/local/include/hyperface
# INCLUDEPATH+=/usr/local/include/hyper INCLUDEPATH+=/usr/local/include/hyper
# INCLUDEPATH+=/usr/local/include/XNetSDK INCLUDEPATH+=/usr/local/include/XNetSDK
# INCLUDEPATH+=/usr/local/include/human INCLUDEPATH+=/usr/local/include/human
# } }
#} }
# You can also make your code fail to compile if it uses deprecated APIs. # You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line. # In order to do so, uncomment the following line.
......
...@@ -28,14 +28,15 @@ MainWindow::MainWindow() ...@@ -28,14 +28,15 @@ MainWindow::MainWindow()
deleteFrameFileTimer->start(deMkvflieTimer); deleteFrameFileTimer->start(deMkvflieTimer);
initFaceFaceRecognition(); initFaceFaceRecognition();
FaceReconition &faceRecognition = FaceReconition::getInstance(); FaceReconition &faceRecognition = FaceReconition::getInstance();
float confidence=qSetting->value("devices/confidence").toFloat(); float confidence=qSetting->value("devices/confidence").toFloat();
int faceNumbers=qSetting->value("devices/faceNumbers").toInt();
if(localImageMap.size()>0){ if(localImageMap.size()>0){
faceRecognition.initSourceImageMap(localImageMap,confidence); faceRecognition.initSourceImageMap(localImageMap,faceNumbers,confidence);
} }
float carConfidence=qSetting->value("devices/carConfidence").toFloat();
//LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance(); //LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance();
//licensePlateRecogn.initHlprContext(modelPaths,qSetting->value("licensePlateRecognition/car_cascade_path").toString(),carConfidence); //licensePlateRecogn.initHlprContext(modelPaths,qSetting->value("licensePlateRecognition/car_cascade_path").toString(),carConfidence);
QString httpurl; QString httpurl;
...@@ -65,7 +66,7 @@ MainWindow::MainWindow() ...@@ -65,7 +66,7 @@ MainWindow::MainWindow()
dePermissionSynTimer->start(dePermissionTimer); dePermissionSynTimer->start(dePermissionTimer);
connect(&server, &QTcpServer::newConnection, this, &MainWindow::handleMatNewConnection); connect(&server, &QTcpServer::newConnection, this, &MainWindow::handleMatNewConnection);
int port=qSetting->value("localservice/port").toInt(); int port=qSetting->value("localservice/port").toInt();
if (!server.listen(QHostAddress::Any, port)) { if (!server.listen(QHostAddress::Any, port)) {
...@@ -75,6 +76,16 @@ MainWindow::MainWindow() ...@@ -75,6 +76,16 @@ MainWindow::MainWindow()
} }
} }
CameraHandle* MainWindow::findHandle(QString sn){
for (auto it = faceDetectionParkingPushs.begin(); it != faceDetectionParkingPushs.end(); ++it) {
QString currentSn = it->second->getSSn();
if (currentSn == sn) {
CameraHandle* matchedHandle = it->second;
return matchedHandle;
}
}
return nullptr;
}
void MainWindow::sendJsonResponse(QTcpSocket* socket, int code, const QString& data, const QString& msg){ void MainWindow::sendJsonResponse(QTcpSocket* socket, int code, const QString& data, const QString& msg){
QJsonObject jsonResponse; QJsonObject jsonResponse;
jsonResponse["code"] = code; jsonResponse["code"] = code;
...@@ -257,8 +268,10 @@ void MainWindow::updateLocalFace(const QString &httpurl) { ...@@ -257,8 +268,10 @@ void MainWindow::updateLocalFace(const QString &httpurl) {
faceRecognition.featureRemove(); faceRecognition.featureRemove();
} else { } else {
float confidence=qSetting->value("devices/confidence").toFloat(); float confidence=qSetting->value("devices/confidence").toFloat();
int faceNumbers=qSetting->value("devices/faceNumbers").toInt();
qDebug()<<"startMap != endMap-->"; qDebug()<<"startMap != endMap-->";
faceRecognition.initSourceImageMap(localImageMap,confidence); faceRecognition.initSourceImageMap(localImageMap,faceNumbers, confidence);
} }
} }
instance.deleteObj(res); instance.deleteObj(res);
...@@ -268,12 +281,11 @@ void MainWindow::findLocalSerialNumber(QString &serialNumber){ ...@@ -268,12 +281,11 @@ void MainWindow::findLocalSerialNumber(QString &serialNumber){
if(vides_data::isVirtualMachine()){ if(vides_data::isVirtualMachine()){
serialNumber = QSysInfo::machineUniqueId(); serialNumber = QSysInfo::machineUniqueId();
}else{ }else{
serialNumber =vides_data::getSerialNumber(); if(localSn.length()>0){
if (!serialNumber.isEmpty()) { serialNumber=localSn;
qDebug() << "CPU Serial Number:" << serialNumber; }else {
} else { serialNumber =vides_data::getCpuSerialNumber();
qDebug() << "CPU Serial Number not found!"; localSn=serialNumber;
return;
} }
} }
} }
...@@ -301,26 +313,22 @@ void MainWindow::startCamera(const QString &httpurl){ ...@@ -301,26 +313,22 @@ void MainWindow::startCamera(const QString &httpurl){
Common & instace= Common::getInstance(); Common & instace= Common::getInstance();
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance(); MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
QString nonConstHttpUrl = std::remove_const<QString>::type(httpurl); QString nonConstHttpUrl = std::remove_const<QString>::type(httpurl);
vides_data::responseDeviceData devices; vides_data::responseDeviceData devices;
// QString serialNumber = QSysInfo::machineUniqueId(); // QString serialNumber = QSysInfo::machineUniqueId();
QString serialNumber; QString serialNumber;
findLocalSerialNumber(serialNumber); findLocalSerialNumber(serialNumber);
qInfo()<<"serialNumber==:"<<serialNumber;
vides_data::requestDeviceStatus reStatus; vides_data::requestDeviceStatus reStatus;
reStatus.sSn=serialNumber; reStatus.sSn=serialNumber;
reStatus.status=1; reStatus.status=1;
reStatus.type=1; reStatus.type=1;
reStatus.ip_addr=instace.GetLocalIp(); reStatus.ip_addr=instace.GetLocalIp();
reStatus.firmware_version=APP_VERSION;
HttpService httpService(httpurl); HttpService httpService(httpurl);
vides_data::response *res=httpService.httpPostDeviceStatus(reStatus);
if(res->code!=0){
qInfo()<<"盒子状态上报失败 code:"<<res->code<<"msg:"<<res->data;
}
instace.deleteObj(res);
httpService.setHttpUrl(httpurl);
vides_data::response *re= httpService.httpFindCameras(serialNumber,devices); vides_data::response *re= httpService.httpFindCameras(serialNumber,devices);
if(re->code==0 || re->code==20004){ if(re->code==0 || re->code==20004){
QString username = qSetting->value("devices/username").toString(); QString username = qSetting->value("devices/username").toString();
...@@ -331,6 +339,7 @@ void MainWindow::startCamera(const QString &httpurl){ ...@@ -331,6 +339,7 @@ void MainWindow::startCamera(const QString &httpurl){
instace.deleteObj(re); instace.deleteObj(re);
return ; return ;
} }
int alg=devices.algorithm;
for (const auto& device : devices.list) { for (const auto& device : devices.list) {
if(localDevices.count(device.sSn)>0 ){ if(localDevices.count(device.sSn)>0 ){
vides_data::localDeviceStatus* localDevice= localDevices.at(device.sSn); vides_data::localDeviceStatus* localDevice= localDevices.at(device.sSn);
...@@ -339,7 +348,7 @@ void MainWindow::startCamera(const QString &httpurl){ ...@@ -339,7 +348,7 @@ void MainWindow::startCamera(const QString &httpurl){
QString key = ipAddress + ":" + QString::number(localDevice->TCPPort); QString key = ipAddress + ":" + QString::number(localDevice->TCPPort);
if(faceDetectionParkingPushs.count(key)<=0){ if(faceDetectionParkingPushs.count(key)<=0){
httpService.setHttpUrl(httpurl); httpService.setHttpUrl(httpurl);
vides_data::cameraParameters parameter; vides_data::cameraParameters parameter;
parameter.sDevId=ipAddress; parameter.sDevId=ipAddress;
parameter.nDevPort=localDevice->TCPPort; parameter.nDevPort=localDevice->TCPPort;
...@@ -350,21 +359,35 @@ void MainWindow::startCamera(const QString &httpurl){ ...@@ -350,21 +359,35 @@ void MainWindow::startCamera(const QString &httpurl){
parameter.sSn=device.sSn; parameter.sSn=device.sSn;
//parameter.rtspUrl="rtsp://192.168.10.131:554/user=admin&password=&channel=1&stream=1.sdp?"; //parameter.rtspUrl="rtsp://192.168.10.131:554/user=admin&password=&channel=1&stream=1.sdp?";
//parameter.rtspUrl=std::move(QString("rtsp://admin:@%1/stream1").arg(ipAddress)); //parameter.rtspUrl=std::move(QString("rtsp://admin:@%1/stream1").arg(ipAddress));
this->initCameras(parameter,device.areas); this->initCameras(parameter,device.areas,alg,reStatus.camera_info_list);
} }
else { else {
auto it = this->faceDetectionParkingPushs.find(key); CameraHandle *indexHandle=findHandle(device.sSn);
if (it != this->faceDetectionParkingPushs.end()) { if(indexHandle!=nullptr &&device.is_reboot){
CameraHandle* offlineCameraHandle = it->second; // 注意使用->second获取值 indexHandle->deviceReboot();
if(!offlineCameraHandle->compareLists(device.areas)){ }else {
offlineCameraHandle->updateParkMapAndParkingSpaceInfos(device.areas); auto it = this->faceDetectionParkingPushs.find(key);
if (it != this->faceDetectionParkingPushs.end()) {
CameraHandle* offlineCameraHandle = it->second; // 注意使用->second获取值
vides_data::requestCameraInfo camera_info;
camera_info.sSn=offlineCameraHandle->getSSn();
offlineCameraHandle->findIp(camera_info.ip_addr);
offlineCameraHandle->findFirmwareVersion(camera_info.firmware_version);
reStatus.camera_info_list.push_front(camera_info);
__uint8_t new_algorithm= intToUint8t(alg);
offlineCameraHandle->initAlgorithmPermissions(new_algorithm);
if(!offlineCameraHandle->compareLists(device.areas)){
offlineCameraHandle->updateParkMapAndParkingSpaceInfos(device.areas);
}
} }
} }
} }
} }
} }
this->deleteCloudNotCamer(localDevices, devices.list); this->deleteCloudNotCamer(localDevices, devices.list);
for (auto& pair : localDevices) { for (auto& pair : localDevices) {
if (pair.second != nullptr) { // 如果对象未被删除(即不为nullptr) if (pair.second != nullptr) { // 如果对象未被删除(即不为nullptr)
instace.deleteObj(pair.second); instace.deleteObj(pair.second);
...@@ -373,10 +396,17 @@ void MainWindow::startCamera(const QString &httpurl){ ...@@ -373,10 +396,17 @@ void MainWindow::startCamera(const QString &httpurl){
// 清空 localDevices 容器 // 清空 localDevices 容器
localDevices.clear(); localDevices.clear();
} }
httpService.setHttpUrl(httpurl);
vides_data::response *res=httpService.httpPostDeviceStatus(reStatus);
if(res->code!=0){
qInfo()<<"盒子状态上报失败 code:"<<res->code<<"msg:"<<res->msg;
}
instace.deleteObj(res);
updateLocalFace(httpurl); updateLocalFace(httpurl);
instace.deleteObj(re); instace.deleteObj(re);
} }
bool MainWindow::isDeviceInList(const QString& deviceId, const std::list<vides_data::responseDeviceStatus>& devices) { bool MainWindow::isDeviceInList(const QString& deviceId, const std::list<vides_data::responseDeviceStatus>& devices) {
...@@ -696,11 +726,22 @@ void MainWindow::iniRecordingToString(QString &recorJson){ ...@@ -696,11 +726,22 @@ void MainWindow::iniRecordingToString(QString &recorJson){
QJsonDocument jsonDocument(jsonArray); QJsonDocument jsonDocument(jsonArray);
recorJson = QString::fromUtf8(jsonDocument.toJson()); recorJson = QString::fromUtf8(jsonDocument.toJson());
} }
void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::list<vides_data::responseArea>&areas){
__uint8_t MainWindow::intToUint8t(int algorithm){
if (algorithm >= 0 && algorithm <= 255) {
return static_cast<__uint8_t>(algorithm);
}
// 处理错误或取值超出范围的情况
qInfo()<<"Value out of range for conversion to __uint8_t";
return 0x07;
}
void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::list<vides_data::responseArea>&areas,int algorithm,std::list<vides_data::requestCameraInfo>&camera_info_list){
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance(); MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
float carConfidence=qSetting->value("devices/carConfidence").toFloat(); float carConfidence=qSetting->value("devices/carConfidence").toFloat();
int image_save=qSetting->value("devices/image_save").toInt(); int image_save=qSetting->value("devices/image_save").toInt();
CameraHandle * cameraHandle =new CameraHandle(parameter.sDevId,parameter.httpUrl,parameter.sSn,parameter.channel,modelPaths,carConfidence,image_save); CameraHandle * cameraHandle =new CameraHandle(parameter.sDevId,parameter.httpUrl,parameter.sSn,parameter.channel,modelPaths,carConfidence,image_save);
int sdk_handle=cameraHandle->sdkDevLoginSyn(parameter.sDevId,parameter.nDevPort,parameter.sUserName,parameter.sPassword,10000); int sdk_handle=cameraHandle->sdkDevLoginSyn(parameter.sDevId,parameter.nDevPort,parameter.sUserName,parameter.sPassword,10000);
...@@ -715,13 +756,20 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std:: ...@@ -715,13 +756,20 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
cameraHandle->sdkDevSetAlarmListener(sdk_handle,1); cameraHandle->sdkDevSetAlarmListener(sdk_handle,1);
int synTime=qSetting->value("timer/dev_snap_syn_timer").toInt(); int synTime=qSetting->value("timer/dev_snap_syn_timer").toInt();
uint64 face_frequency=qSetting->value("devices/face_frequency").toULongLong(); uint64 face_frequency=qSetting->value("devices/face_frequency").toULongLong();
cameraHandle->initSdkRealTimeDevSnapSyn(sdk_handle,synTime,face_frequency); cameraHandle->initSdkRealTimeDevSnapSyn(sdk_handle,synTime,face_frequency);
int seTime=qSetting->value("timer/semaphore_time").toInt(); vides_data::requestCameraInfo camera_info;
cameraHandle->setTimeoutMs(seTime); camera_info.sSn=parameter.sSn;
camera_info.ip_addr=parameter.sDevId;
cameraHandle->findFirmwareVersion(camera_info.firmware_version);
camera_info_list.push_front(camera_info);
__uint8_t new_algorithm= intToUint8t(algorithm);
cameraHandle->initAlgorithmPermissions(new_algorithm);
cameraHandle->initParkingSpaceInfo(areas); cameraHandle->initParkingSpaceInfo(areas);
Common & instace= Common::getInstance(); Common & instace= Common::getInstance();
QString key =parameter.sDevId + ":" + QString::number(parameter.nDevPort); QString key =parameter.sDevId + ":" + QString::number(parameter.nDevPort);
faceDetectionParkingPushs[key]= cameraHandle; faceDetectionParkingPushs[key]= cameraHandle;
HttpService httpService(parameter.httpUrl); HttpService httpService(parameter.httpUrl);
......
...@@ -40,37 +40,40 @@ public: ...@@ -40,37 +40,40 @@ public:
void createDirectory(int flag,const QString& dirName, const QString& successMsg, const QString& failureMsg); void createDirectory(int flag,const QString& dirName, const QString& successMsg, const QString& failureMsg);
void initFaceFaceRecognition(); void initFaceFaceRecognition();
void initCameras(vides_data::cameraParameters &parameter,const std::list<vides_data::responseArea>&areas); void initCameras(vides_data::cameraParameters &parameter,
const std::list<vides_data::responseArea>&areas,int algorithm,std::list<vides_data::requestCameraInfo>&camera_info_list);
__uint8_t intToUint8t(int algorithm);
static MainWindow * sp_this; static MainWindow * sp_this;
CameraHandle* findHandle(QString sn);
void sendJsonResponse(QTcpSocket* socket, int code, const QString& data, const QString& msg); void sendJsonResponse(QTcpSocket* socket, int code, const QString& data, const QString& msg);
void sendEmptyResponse(QTcpSocket* socket); void sendEmptyResponse(QTcpSocket* socket);
void sendNotFoundResponse(QTcpSocket* socket); void sendNotFoundResponse(QTcpSocket* socket);
void updateLocalFace(const QString &httpurl); void updateLocalFace(const QString &httpurl);
void removeImageFiles(QString id); void removeImageFiles(QString id);
void modifyImagesAndNames(QString &modId); void modifyImagesAndNames(QString &modId);
void findLocalSerialNumber(QString &serialNumber); void findLocalSerialNumber(QString &serialNumber);
void initDevConfigSyn(CameraHandle *cameraHandle); void initDevConfigSyn(CameraHandle *cameraHandle);
void iniRecordingToString(QString &recorJson); void iniRecordingToString(QString &recorJson);
void iniEncodeToString(QString &enCodeJson); void iniEncodeToString(QString &enCodeJson);
void clearOfflineCameraHandle(QString sDevId, int nDevPort); void clearOfflineCameraHandle(QString sDevId, int nDevPort);
bool iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &workSpWMn,QString &sn); bool iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &workSpWMn,QString &sn);
bool isDeviceInList(const QString& deviceId, const std::list<vides_data::responseDeviceStatus>& devices); bool isDeviceInList(const QString& deviceId, const std::list<vides_data::responseDeviceStatus>& devices);
// 过滤函数 // 过滤函数
void deleteCloudNotCamer (const std::map<QString,vides_data::localDeviceStatus*>& localDevices, void deleteCloudNotCamer (const std::map<QString,vides_data::localDeviceStatus*>& localDevices,
...@@ -84,9 +87,9 @@ private slots: ...@@ -84,9 +87,9 @@ private slots:
void startCamera(const QString &httpurl); void startCamera(const QString &httpurl);
void deleteLogFile(); void deleteLogFile();
void clearHandle(QString sDevId, int nDevPort); void clearHandle(QString sDevId, int nDevPort);
void deleteMkvFileTimer(); void deleteMkvFileTimer();
void handleMatNewConnection(); void handleMatNewConnection();
...@@ -98,11 +101,12 @@ private: ...@@ -98,11 +101,12 @@ private:
QTimer *deleteLogFileTimer; QTimer *deleteLogFileTimer;
QTimer *deleteFrameFileTimer; QTimer *deleteFrameFileTimer;
QTimer*dePermissionSynTimer; QTimer*dePermissionSynTimer;
QTcpServer server; QTcpServer server;
QString localSn;
//本地id:图片路径 //本地id:图片路径
std::map<QString,QString>localImageMap; std::map<QString,QString>localImageMap;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment