Commit b6009d23 by Amos

Merge branch 'release' into 'master'

merge Release

See merge request !102
parents 987a0177 ac65e986
@@ -42,7 +42,7 @@ void AlgorithmTaskManage::initialize(int humanDetectionLen, int licensePlateLen,
 }
 void AlgorithmTaskManage::initHumanDetectionManage(const QString &modelPaths,
-                                                   float carShapeConfidence,int &uniformColor) {
+                                                   float carShapeConfidence,QString &uniformColor) {
     for (int i = 0; i < humanDetectionLen; ++i) {
         HumanDetection* human=new HumanDetection(modelPaths,carShapeConfidence);
         human->setHuManParameter(uniformColor);
@@ -113,7 +113,7 @@ void AlgorithmTaskManage::releaseResources(const vides_data::DetectionParams& pa
     QString modelPath = params.modelPaths;
     float humanCarShapeConfidence = params.humanCarShapeConfidence;
-    int uniformColor = params.uniformColor;
+    QString uniformColor = params.uniformColor;
     std::map<QString, QString> faceMaps = params.faceMaps;
     int numberFaces = params.numberFaces;
     float faceConfidence = params.faceConfidence;
......
@@ -26,7 +26,7 @@ public:
     void initialize(int humanDetectionLen, int licensePlateLen, int faceLen,bool first,__uint8_t algorithmPermissions );
     void initHumanDetectionManage(const QString &modelPaths,
-                                  float carShapeConfidence,int &uniformColor);
+                                  float carShapeConfidence,QString &uniformColor);
     void initLicensePlateManage(const QString &modelPaths,bool is_high,int maxNum,bool useHalf,
                                 float boxThreshold,float nmsThreshold,float recThreshold);
......
@@ -217,15 +217,20 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
            }
        }
    }
}
int CameraHandle::sdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener) {
    return XSDK_DevSetAlarmListener(hDevice,bListener);
}
+void CameraHandle::setHkDevice(bool hk_status,int hk_Device){
+    this->hk_Device=hk_Device;
+    this->hk_status=hk_status;
+}
int CameraHandle::getChannel(){
    return channel;
}
int CameraHandle::getHdevice() {
    return hDevice;
}
@@ -394,6 +399,10 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
    {
        qInfo() << "OnDevAlarmCallback[Dev:" << hObject << "][Event:" << szString << "]";
    }
+    if(algorithmPermissions ==0x00){
+        return -1 ;
+    }
    cv::Mat image;
    MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
    qint64 currentTime= QDateTime::currentSecsSinceEpoch();
@@ -401,9 +410,24 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
    if (image.empty())
    {
-        qInfo() << "Failed to read the image";
+        qInfo() << "FaceImageCallBack抓图失败,尝试调用FaceHkImageCallBack";
+        if(!hk_status){
+            qInfo() <<QString("SN(%1): callbackFunction通过IP获取到对应通道号失败").arg(sSn);
+            return -1;
+        }
+        int hk_channel= mediaFaceImage->getIPChannelInfo(hk_Device,QString(loginParam->sDevId));
+        if(hk_channel<0){
+            qInfo() <<QString("SN(%1): callbackFunction通过IP获取到对应通道号失败").arg(sSn);
+            return -1;
+        }
+        // 调用FaceHkImageCallBack抓图
+        mediaFaceImage->FaceHkImageCallBack(hk_Device, hk_channel, image, sSn);
+        // 再次检查抓图是否成功
+        if (image.empty()) {
+            qInfo() << "FaceHkImageCallBack抓图也失败";
            return -1;
        }
+    }
    if (image.rows <= 0 || image.cols <= 0 || image.channels() <= 0) {
        qInfo() << "图像尺寸或通道数不正确,需排查原因";
        return -1;
@@ -412,25 +436,8 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
    updateImage(image, currentTime);
}
-void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
-    if(hDevice<=0){
-        qInfo() << QString("SN(%1): 相机断线").arg(sSn);
-        return;
-    }
-    if (!semaphore.tryAcquire()) {
-        qInfo() << QString("SN(%1): callbackFunction:正在执行线程").arg(sSn);;
-        return ;
-    }
-    ScopeSemaphoreExit guard([this]() {
-        semaphore.release(); // 释放信号量
-    });
+bool CameraHandle::threeConsecutiveImage(){
    Common & instace= Common::getInstance();
-    cv::Mat image;
-    MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
-    qint64 currentTime= QDateTime::currentSecsSinceEpoch();
-    int ret=mediaFaceImage->FaceImageCallBack(hDevice,nChannel, image,sSn);
-    if (ret < 0) {
    offlineCount++; // 累加计数器
    if (offlineCount >= 3) { // 判断是否连续3次返回0
        qInfo() <<QString("SN(%1): 设备离线").arg(sSn);
@@ -448,11 +455,51 @@ void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
        // TODO: 可以在此处更新设备状态、发送告警通知等
        // 重置计数器,以便下次再次检测连续离线
        offlineCount = 0;
+        return true;
+    }
+    return false;
+}
+void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
+    if(hDevice<=0){
+        qInfo() << QString("SN(%1): 相机断线").arg(sSn);
        return;
    }
+    if (!semaphore.tryAcquire()) {
+        qInfo() << QString("SN(%1): callbackFunction:正在执行线程").arg(sSn);
+        return ;
+    }
+    ScopeSemaphoreExit guard([this]() {
+        semaphore.release(); // 释放信号量
+    });
+    if(algorithmPermissions ==0x00){
+        return ;
+    }
+    cv::Mat image;
+    MediaFaceImage *mediaFaceImage= MediaFaceImage::getInstance();
+    qint64 currentTime= QDateTime::currentSecsSinceEpoch();
+    int ret=mediaFaceImage->FaceImageCallBack(hDevice,nChannel, image,sSn);
+    if (ret <= 0) {
+        if(!hk_status){
+            if(threeConsecutiveImage())return;
+        }else {
+            int hk_channel= mediaFaceImage->getIPChannelInfo(hk_Device,QString(loginParam->sDevId));
+            if(hk_channel<0){
+                qInfo() <<QString("SN(%1): NVR=>sdkDevSnapSyn 通过IP获取到对应通道号失败").arg(sSn);
+                return ;
+            }
+            int retHk = mediaFaceImage->FaceHkImageCallBack(hk_Device, hk_channel, image, sSn);
+            if(retHk<0){
+                qInfo() <<QString("SN(%1): NVR=>FaceHkImageCallBack 句柄获取图片失败").arg(sSn);
+                if(threeConsecutiveImage())return;
+            }else {
+                // NVR抓图成功,则重置计数器
+                offlineCount = 0;
+            }
+        }
    } else {
-        // 如果不连续,则重置计数器
+        //第一次相机抓图成功,重置计数器
        offlineCount = 0;
    }
    if (image.empty())
@@ -462,7 +509,6 @@ void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
    }
    if (image.rows <= 0 || image.cols <= 0 || image.channels() <= 0) {
        qInfo()<<QString("SN(%1): 图像尺寸或通道数不正确,需排查原因").arg(sSn);
        return ;
    }
    updateImage(image, currentTime);
@@ -474,6 +520,7 @@ void CameraHandle::matToBase64(const cv::Mat &image, QByteArray &base64Data) {
    cv::imencode(".jpg", image, buffer, params);
    base64Data = QByteArray(reinterpret_cast<const char*>(buffer.data()), buffer.size()).toBase64();
}
void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat &frame, RecognizedInfo& newInfo,
                                              int &result){
    if (newInfo.getLicensePlate() != park->getCurrentPlate().getLicensePlate()) {
@@ -511,7 +558,7 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
        }else {
            // 没有车辆或车辆在停车区域内部,移除队列
            park->removeNoQueue();
-            qDebug()<<QString("SN(%1): no出场::%2").arg(sSn).arg(car_size);
+            qInfo()<<QString("SN(%1): no出场::%2").arg(sSn).arg(car_size);
        }
    }else{
        //当前不为空,新车,新车入场,老车出场
@@ -750,7 +797,6 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
    if(image_save==1){
        QString fileName= instace.getVideoOut().append(instace.getTimeString()+".jpg");
        bool success = cv::imwrite(fileName.toStdString(), frame);
        if (success) {
            qInfo() << "图片已成功保存至:" << fileName;
        } else {
@@ -1164,7 +1210,7 @@ void CameraHandle::initWorkSpVMn(vides_data::responseGb28181 *gb28181, QString &
    toJsonObject["uiAlarmStateLoseEnable"] = "0xFFFFFFFF";
    toJsonObject["uiAlarmStateMotionEnable"] = "0xFFFFFFFF";
    toJsonObject["uiAlarmStatePerformanceEnable"] = "0xFFFFFFFF";
-    toJsonObject["sUdpPort"] = 5060;
+    toJsonObject["sUdpPort"] = gb28181->sUdpPort;
    // 生成 workSpWMn
    QJsonDocument doc(toJsonObject);
@@ -1312,6 +1358,7 @@ void CameraHandle::updateSdkDevSpvMn(vides_data::responseGb28181 *gb28181){
    const char* szDeviceNO = config.szDeviceNO.ToString();
    const char* szServerDn = config.szServerDn.ToString();
    const char* szServerNo = config.szServerNo.ToString();
+    int u_port=config.sUdpPort.ToInt();
    bool isEqual = (szCsIP == gb28181->sip_ip &&
                    sCsPort == gb28181->sip_port &&
                    szServerNo == gb28181->serial &&
@@ -1320,6 +1367,7 @@ void CameraHandle::updateSdkDevSpvMn(vides_data::responseGb28181 *gb28181){
                    iHsIntervalTime == gb28181->heartbeat_interval &&
                    szConnPass == gb28181->password &&
                    szDeviceNO == gb28181->device_id &&
+                    u_port == gb28181->sUdpPort &&
                    Camreaid == gb28181->channel_id);
    if(!isEqual){
        config.Camreaid.InitArraySize(64);
@@ -1347,7 +1395,7 @@ void CameraHandle::updateSdkDevSpvMn(vides_data::responseGb28181 *gb28181){
    config.szServerNo.SetValue(sz_ServerNo);
    config.sCsPort.SetValue(gb28181->sip_port);
-    config.sUdpPort.SetValue(5060);
+    config.sUdpPort.SetValue(gb28181->sUdpPort);
    QByteArray && bSzServerDn=gb28181->realm.toUtf8();
    char* sz_ServerDn = bSzServerDn.data();
......
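The snapshot fallback added above only takes effect once each CameraHandle has been given the NVR login handle via setHkDevice(). A minimal wiring sketch, assuming a hypothetical helper name (wireNvrFallback) and an HCNetSDK user ID obtained elsewhere (for example by the new MainWindow::loginNetDvr):

```cpp
// Hypothetical wiring sketch (not part of this commit): after the NVR login
// succeeds, every CameraHandle is told the HCNetSDK user ID so the snapshot
// fallback in callbackFunction/sdkDevSnapSyn can use it.
#include <map>
#include "CameraHandle.h"
#include "VidesData.h"

void wireNvrFallback(std::map<int, CameraHandle*> &handles,
                     const vides_data::NvrConfig &nvr, int nvrUserId) {
    // hk_status=false keeps only the original "three consecutive failures => offline" path.
    bool nvrOk = nvr.isOn && nvrUserId >= 0;
    for (auto &entry : handles) {
        entry.second->setHkDevice(nvrOk, nvrUserId);
    }
}
```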
#ifndef CAMERAHANDLE_H
#define CAMERAHANDLE_H
-#include "RecognitionInfo.h"
+#include "RecognizedInfo.h"
#include "FaceReconitionHandle.h"
#include "HttpService.h"
#include "LicensePlateRecognition.h"
@@ -50,9 +50,9 @@ public:
    ~CameraHandle();
    int sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
    //int SdkMediaGetFaceImage(int hDevice, int nSeq, int nTimeout);
    int sdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener);
+    void setHkDevice(bool hk_status,int hk_Device);
    int getHdevice();
    int getChannel();
@@ -83,6 +83,8 @@ public:
    void licensePlateRecognitionResults(vides_data::requestLicensePlate &location);
+    bool threeConsecutiveImage();
    void sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel);
    void printWifi(XSDK_HANDLE hDevice,XSDK_CFG::NetWork_Wifi &cfg);
@@ -185,7 +187,10 @@ private slots:
private :
    int hDevice;
+    int hk_Device;
+    bool hk_status;
    int channel;
    QString httpUrl;
    SXSDKLoginParam *loginParam;
......
@@ -51,7 +51,7 @@ cv::Mat FaceReconitionHandle::loadImage(const QString &path) {
        qInfo() << "图像以CImg成功加载。";
        return image;
    }
-    image=loadImageWithSTB(path);
+    image=loadImageWithStb(path);
    if (!image.empty()) {
        qInfo() << "图像以stb_image成功加载。";
        return image;
@@ -204,7 +204,7 @@ void FaceReconitionHandle::featureRemoveMap(std::map<QString, QString> &maps){
}
-cv::Mat FaceReconitionHandle::loadImageWithSTB(const QString& filename) {
+cv::Mat FaceReconitionHandle::loadImageWithStb(const QString& filename) {
    int width, height, channels;
    QByteArray && b_filename = filename.toUtf8();
    char* c_filename = b_filename.data();
......
@@ -32,7 +32,7 @@ public:
    cv::Mat loadQImageFromByteStream(const QString& filePath);
-    cv::Mat loadImageWithSTB(const QString& filename);
+    cv::Mat loadImageWithStb(const QString& filename);
    bool getImageChanged()const;
......
#include "HttpService.h"
vides_data::responseStsCredentials HttpService::stsCredentials;
+QString HttpService::sing_key;
HttpService::HttpService() {
@@ -45,6 +46,7 @@ vides_data::response* HttpService::httpPostDeviceStatus(vides_data::requestDevic
    vides_data::response *resp = new vides_data::response();
    QNetworkRequest request;
+    assembleSingHeaders(request);
    request.setUrl(QUrl(httpUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
@@ -78,6 +80,7 @@ vides_data::response* HttpService::httpPostRecord(int id,int recongnition_type,Q
    vides_data::response *resp=new vides_data::response();
    QNetworkRequest request;
+    assembleSingHeaders(request);
    request.setUrl(QUrl(httpUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
@@ -99,6 +102,15 @@ vides_data::response* HttpService::httpPostRecord(int id,int recongnition_type,Q
void HttpService::setHttpUrl(const QString &httpUrl){
    this->httpUrl=httpUrl;
}
+void HttpService::setSingKey(const QString &key) {
+    sing_key = key;
+}
+QString HttpService::getSingKey() {
+    return sing_key;
+}
vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_data::responseDeviceData&responseData) {
    httpUrl.append("/api/v1.0/device/all");
    vides_data::response *resp=new vides_data::response();
@@ -106,6 +118,7 @@ vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_d
    query.addQueryItem("sn",serialNumber);
    query.addQueryItem("new_token",QString::number(1));
    QNetworkRequest request;
+    assembleSingHeaders(request);
    QUrl url(httpUrl);
    url.setQuery(query);
    request.setUrl(url);
@@ -222,6 +235,7 @@ vides_data::response *HttpService::httpLicensePlateRecognition(vides_data::reque
    QByteArray bytearr= doc.toJson(QJsonDocument::Compact);
    vides_data::response *resp=new vides_data::response();
+    assembleSingHeaders(request);
    request.setUrl(QUrl(httpUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
@@ -264,6 +278,7 @@ vides_data::response* HttpService::httpFindFaceReconition(QString &serialNumber,
    query.addQueryItem("sn",serialNumber);
    QUrl url(httpUrl);
    url.setQuery(query);
+    assembleSingHeaders(request);
    request.setUrl(url);
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
@@ -305,6 +320,7 @@ vides_data::response *HttpService::httpPostUniforms(QByteArray &img,QString &id,
    vides_data::response *resp=new vides_data::response();
    QNetworkRequest request;
+    assembleSingHeaders(request);
    request.setUrl(QUrl(httpUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
@@ -346,6 +362,7 @@ vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &h
    vides_data::response *resp=new vides_data::response();
    QNetworkRequest request;
+    assembleSingHeaders(request);
    request.setUrl(QUrl(httpUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
@@ -394,6 +411,7 @@ vides_data::response *HttpService::httpPostFaceReconition(vides_data::requestFac
    vides_data::response *resp=new vides_data::response();
    QNetworkRequest request;
+    assembleSingHeaders(request);
    request.setUrl(QUrl(httpUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
@@ -419,6 +437,7 @@ vides_data::response*HttpService::httpFindGb28181Config(QString &serialNumber){
    QNetworkRequest request;
    QUrl url(httpUrl);
    url.setQuery(query);
+    assembleSingHeaders(request);
    request.setUrl(url);
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
@@ -442,6 +461,8 @@ vides_data::response*HttpService::httpFindGb28181Config(QString &serialNumber){
            response->heartbeat_interval = data["heartbeat_interval"].toInt();
            response->device_id = data["device_id"].toString();
            response->channel_id = data["channel_id"].toString();
+            QString local_port= data["local_port"].toString();
+            response->sUdpPort=local_port.toInt();
            resp->data=response;
            resp->msg=map["message"].toString();
        }else{
@@ -460,6 +481,7 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
    QNetworkRequest request;
    QUrl url(httpUrl);
    url.setQuery(query);
+    assembleSingHeaders(request);
    request.setUrl(url);
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
@@ -531,7 +553,7 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
    // 解析 uniformConfig
    QJsonObject uniformConfigObj = dataObj["uniformConfig"].toObject();
    config.uniformConfig.isOn = uniformConfigObj["isOn"].toBool();
-    config.uniformConfig.uniformColor = uniformConfigObj["uniformColor"].toInt();
+    config.uniformConfig.uniformColor = uniformConfigObj["uniformColor"].toString();
    config.uniformConfig.humanDetectionLen = uniformConfigObj["humanDetectionLen"].toInt();
    config.uniformConfig.updateAt = uniformConfigObj["updateAt"].toVariant().toULongLong();
    config.uniformConfig.carShapeConfidence = uniformConfigObj["carShapeConfidence"].toVariant().toFloat();
@@ -540,6 +562,13 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
    config.humanConfig.isOn=humanConfigObj["isOn"].toBool();
    config.humanConfig.updateAt = humanConfigObj["updateAt"].toVariant().toULongLong();
    config.humanConfig.faceFrequency = humanConfigObj["faceFrequency"].toVariant().toUInt();
+    //解析nvrConfig;
+    QJsonObject nvrConfigObj= dataObj["nvrConfig"].toObject();
+    config.nvrConfig.isOn=nvrConfigObj["isOn"].toBool();
+    config.nvrConfig.ip=nvrConfigObj["ip"].toString();
+    config.nvrConfig.port=(unsigned short) nvrConfigObj["port"].toInt();
+    config.nvrConfig.password=nvrConfigObj["password"].toString();
+    config.nvrConfig.username=nvrConfigObj["username"].toString();
    // 解析 devicesConfig
    QJsonObject devicesConfigObj = dataObj["camera"].toObject();
@@ -573,6 +602,23 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
}
+void HttpService::assembleSingHeaders(QNetworkRequest &reques){
+    QString ts = QString::number(QDateTime::currentMSecsSinceEpoch() / 1000);
+    // 计算签名
+    QCryptographicHash hash(QCryptographicHash::Md5);
+    hash.addData(HttpService::getSingKey().toUtf8());
+    hash.addData(ts.toUtf8());
+    QString sign = hash.result().toHex();
+    // 设置头部信息
+    QMap<QString, QVariant> headers;
+    headers.insert("ts", ts);
+    headers.insert("sign", sign);
+    for (auto it = headers.begin(); it != headers.end(); ++it) {
+        reques.setRawHeader(it.key().toLatin1(), it.value().toString().toLatin1());
+    }
+}
vides_data::response*HttpService::httpFindStream(QString &serialNumber){
    httpUrl.append("/api/v1.0/stream");
......
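The new assembleSingHeaders attaches a `ts` timestamp header and a `sign` header computed as MD5(sing_key + ts). For reference, a server-side check of that scheme could look like the sketch below; the helper name and the 300-second clock-skew window are assumptions, not part of this commit:

```cpp
// Hypothetical server-side verification of the ts/sign headers produced by
// assembleSingHeaders: accept only if sign == md5(sharedKey + ts) and ts is recent.
#include <QCryptographicHash>
#include <QDateTime>
#include <QString>
#include <QtGlobal>

bool verifySingHeaders(const QString &sharedKey, const QString &ts,
                       const QString &sign, qint64 maxSkewSeconds = 300) {
    bool ok = false;
    qint64 tsValue = ts.toLongLong(&ok);
    if (!ok) return false;
    // Reject stale or far-future timestamps to limit replay.
    qint64 now = QDateTime::currentMSecsSinceEpoch() / 1000;
    if (qAbs(now - tsValue) > maxSkewSeconds) return false;
    QCryptographicHash hash(QCryptographicHash::Md5);
    hash.addData(sharedKey.toUtf8());
    hash.addData(ts.toUtf8());
    return QString::fromLatin1(hash.result().toHex()) == sign;
}
```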
@@ -49,7 +49,13 @@ public:
    vides_data::response *httpUploadFile(const QString &filePath,QString& accessKeyId,QString& accessKeySecret,
                                         QString & bucketName,QString &securityToken);
    void setHttpUrl(const QString & httpUrl);
+    static void setSingKey(const QString &key);
+    static QString getSingKey();
+    //组装验证头
+    void assembleSingHeaders(QNetworkRequest &reques);
    vides_data::response *httpDeviceConfig(const QString &serialNumber,vides_data::responseConfig &config);
@@ -63,5 +69,6 @@ private:
    QMutex m_httpClientMutex;
+    static QString sing_key;
};
#endif // HTTPSERVICE_H
@@ -72,7 +72,7 @@ void HumanDetection::draw_human_on_image(const cv::Mat& image, const TCV_ObjectL
    }
}
-void HumanDetection::setHuManParameter(int &uniformColor){
+void HumanDetection::setHuManParameter(QString &uniformColor){
    this->uniformColor=uniformColor;
}
@@ -98,23 +98,38 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
    int num = 0;
    if (res == 0x00 || res == 0x02) {
+        QStringList colorList = uniformColor.split(',');
        num = TCV_HumanDetectorGetNumOfHuman(detector);
        qInfo() << QString("SN(%1): 获取人形数量:%2").arg(sSn).arg(num);
        if (num == 0) return num; // 无行人检测结果,提前返回
+        if(res==0x02 && colorList.size()==0){
+            reMap[0x02] = 0; // 未穿工服的行人数量
+            reMap[0x00] = num; // 所有满足条件的行人数量
+            num = 0;
+            return num;
+        }
+        int count_no_uniform = 0; // 未穿工服的行人数量
+        int count_all = 0; // 所有满足条件的行人数量
        std::vector<TCV_ObjectLocation> results(num);
        TCV_HumanDetectorGetHumanLocation(detector, results.data(), num);
-        int count_no_uniform = 0; // 未穿工服的行人数量
-        int count_all = 0; // 所有满足条件的行人数量
+        QSet<int> uniformNumbers;
+        for (const QString &numStr : colorList) {
+            bool ok;
+            int num = numStr.toInt(&ok);
+            if (ok) {
+                uniformNumbers.insert(num);
+            }
+        }
        for (const auto &person : results) {
-            int tenPlace = uniformColor / 10; // 十位
-            int onePlace = uniformColor % 10; // 个位
            if (std::abs(person.y2 - person.y1) >= heightReference) {
                ++count_all;
                //工服
-                if(person.uniform != tenPlace && person.uniform != onePlace){
+                if(!uniformNumbers.contains(person.uniform)){
                    vides_data::ParkingArea area;
                    area.topLeftCornerX=person.x1;
                    area.topLeftCornerY=person.y1;
......
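This hunk changes the uniform-colour whitelist from a two-digit integer (tens digit and ones digit, so at most two codes) to a comma-separated string that can carry any number of codes. A standalone sketch of the same parsing, with a hypothetical helper name, just to make the new format explicit:

```cpp
// Hypothetical helper mirroring the parsing above: "2,5,7" -> {2, 5, 7}.
// The old int form (e.g. 25) could only encode two colour codes.
#include <QSet>
#include <QString>
#include <QStringList>

QSet<int> parseUniformColors(const QString &uniformColor) {
    QSet<int> codes;
    for (const QString &token : uniformColor.split(',')) {
        bool ok = false;
        int code = token.trimmed().toInt(&ok);
        if (ok) codes.insert(code);   // skip empty or non-numeric tokens
    }
    return codes;
}
```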
@@ -19,7 +19,7 @@ public:
    int findHuManCar(const cv::Mat &source,int res,std::map<int,int>&reMap,QString &sSn,
                     float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate);
-    void setHuManParameter(int &uniformColor);
+    void setHuManParameter(QString &uniformColor);
    void draw_human_on_image(const cv::Mat& image, const TCV_ObjectLocation* boxes, int size);
@@ -27,7 +27,7 @@ private:
    //高度基准
    float heightReference;
-    int uniformColor;
+    QString uniformColor;
    TCV_HumanDetector *detector;
......
@@ -117,6 +117,21 @@ int MediaFaceImage::SdkSearchDevicesSyn(std::map<QString, vides_data::localDevic
}
+int MediaFaceImage::SdkIpcSearchDevicesSyn( int lUserID, NET_DVR_IPPARACFG_V40 *reqParams){
+    int iRet = -1;
+    DWORD uiReturnLen = 0;
+    LONG lChannel = 0;
+    //get
+    iRet = NET_DVR_GetDVRConfig(lUserID, NET_DVR_GET_IPPARACFG_V40, lChannel, \
+                                reqParams, sizeof(NET_DVR_IPPARACFG_V40), &uiReturnLen);
+    if (!iRet){
+        qInfo() <<QString("pyd---NET_DVR_GetDVRConfig NET_DVR_GET_IPPARACFG_V40 error.%1\n").arg(NET_DVR_GetLastError());
+        return -1;
+    }
+    return 0;
+}
int MediaFaceImage::SdkInit(QString &szConfigPath, QString &szTempPath) {
    SXSDKInitParam *pParam=new SXSDKInitParam();
    pParam->nLogLevel=8;
@@ -235,7 +250,7 @@ int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat
    const int BufferSize = 1024 * 1024 * 2; // 缓冲区大小
    image.release(); // 释放之前的图像
    std::unique_ptr<unsigned char[]> pOutBuffer(new unsigned char[BufferSize]); // 智能指针管理内存
-    int pInOutBufferSize = 0;
+    int pInOutBufferSize = -1;
    int ret = XSDK_DevSnapSyn(hMedia, nChannel, "", pOutBuffer.get(), &pInOutBufferSize);
    if (ret < 0 || pInOutBufferSize <= 0) {
@@ -262,6 +277,71 @@ int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat
    return pInOutBufferSize;
}
+int MediaFaceImage::getIPChannelInfo(int hk_Device,const QString& ip){
+    NET_DVR_IPPARACFG_V40 m_strIpparaCfg = {0};
+    m_strIpparaCfg.dwSize = sizeof(m_strIpparaCfg);
+    int bRet =SdkIpcSearchDevicesSyn(hk_Device, &m_strIpparaCfg);
+    if (bRet<0) {
+        qInfo() << "获取IP接入配置参数失败,错误码:" << NET_DVR_GetLastError();
+        return -1;
+    }
+    for (int iChannum = 0; iChannum < m_strIpparaCfg.dwDChanNum; ++iChannum) {
+        if (m_strIpparaCfg.struStreamMode[iChannum].byGetStreamType == 0) {
+            int channel = m_strIpparaCfg.struStreamMode[iChannum].uGetStream.struChanInfo.byIPID +
+                          (m_strIpparaCfg.struStreamMode[iChannum].uGetStream.struChanInfo.byIPIDHigh * 256);
+            if (channel > 0 && m_strIpparaCfg.struStreamMode[iChannum].uGetStream.struChanInfo.byEnable == 1) {
+                char ip_addr[16] = {0};
+                strncpy(ip_addr, m_strIpparaCfg.struIPDevInfo[channel - 1].struIP.sIpV4, sizeof(ip_addr) - 1);
+                QString key = QString::fromUtf8(ip_addr).trimmed();
+                qInfo() << "匹配的IP:" << key << ",通道号:" << channel;
+                if (key == ip) {
+                    qInfo() << "找到匹配的IP:" << key << ",通道号:" << channel;
+                    return channel; // 返回找到的第一个匹配的通道号
+                }
+            }
+        }
+    }
+    qInfo() << "未找到匹配的IP:" << ip;
+    return -1; // 没有找到匹配的IP地址
+}
+int MediaFaceImage::FaceHkImageCallBack(int userID,int channelID,cv::Mat &image,QString &sSn){
+    image.release();
+    NET_DVR_JPEGPARA jpegpara = {0};
+    jpegpara.wPicQuality = 0;
+    jpegpara.wPicSize = 0xff; // 自动选择图片大小
+    const size_t M1 = 8 << 20; // 8 MB 缓冲区大小
+    std::unique_ptr<char[]> buffer(new char[M1]); // 使用智能指针管理缓冲区
+    unsigned int imageLen = 0;
+    bool result = NET_DVR_CaptureJPEGPicture_NEW(userID, 32 + channelID, &jpegpara,
+                                                 buffer.get(), M1, &imageLen);
+    if (!result || imageLen == 0) {
+        qInfo() << "抓图失败, 错误码:" << NET_DVR_GetLastError();
+        return -1;
+    }
+    // 使用 std::vector 管理缓冲区数据
+    std::vector<uchar> imageBuffer(imageLen);
+    memcpy(imageBuffer.data(), buffer.get(), imageLen);
+    try {
+        // 解码 JPEG 数据为 cv::Mat
+        cv::Mat decodedImage = cv::imdecode(imageBuffer, cv::IMREAD_UNCHANGED);
+        if (decodedImage.empty()) {
+            qInfo() << QString("SN(%1): 图像解码失败").arg(sSn);
+            return -1;
+        }
+        // 转移解码后的图像到输出参数
+        image = std::move(decodedImage);
+    } catch (const cv::Exception &e) {
+        qInfo() << QString("SN(%1): 图像解码过程中捕获异常:%2").arg(sSn).arg(e.what());
+        return -1;
+    }
+    return imageLen; // 返回图像数据长度
+}
int MediaFaceImage::CameraImage(XSDK_HANDLE hMedia,int nChannel,std::vector<uchar> &buffer){
    static const int BufferSize = 1024 * 1024 * 2; // 2MB buffer size
    static unsigned char pOutBuffer[BufferSize];
......
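The two new MediaFaceImage helpers are meant to be used together: resolve the NVR channel that proxies a camera IP, then grab a JPEG from that channel. A minimal usage sketch, assuming a hypothetical free function name and an already logged-in HCNetSDK user ID:

```cpp
// Hypothetical usage sketch (not in this commit): grab one frame from the NVR
// channel that maps to the camera with the given IP.
#include <opencv2/opencv.hpp>
#include <QString>
#include "MediaFaceImage.h"

cv::Mat grabViaNvr(int nvrUserId, const QString &cameraIp, QString &sn) {
    cv::Mat frame;
    MediaFaceImage *media = MediaFaceImage::getInstance();
    int channel = media->getIPChannelInfo(nvrUserId, cameraIp);  // -1 if the IP is not mapped
    if (channel < 0) return frame;                               // empty Mat signals failure
    if (media->FaceHkImageCallBack(nvrUserId, channel, frame, sn) < 0)
        frame.release();
    return frame;
}
```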
@@ -6,6 +6,7 @@
#include "VidesData.h"
#include "Common.h"
#include "TaskRunnable.h"
+#include "HCNetSDK.h"
#include <memory>
#include <map>
#include <QDebug>
@@ -19,6 +20,9 @@ public:
    void ParserImageData(const unsigned char* pData, int nDataLen, char* pJpg, int* nJpgLen, char* pJson);
    int FaceImageCallBack(XSDK_HANDLE hMedia,int nChannel,cv::Mat &image,QString &sSn);
+    int FaceHkImageCallBack(int userID,int channelID,cv::Mat &image,QString &sSn);
    int CameraImage(XSDK_HANDLE hMedia,int nChannel,std::vector<uchar> &buffer);
    int ToFile(const char* pFileName, const void* pData, int nLenght);
@@ -27,6 +31,10 @@ public:
    int SdkSearchDevicesSyn(std::map< QString,vides_data::localDeviceStatus*>& devices);
+    int SdkIpcSearchDevicesSyn(int lUserID, NET_DVR_IPPARACFG_V40 *reqParams);
+    //通过IP获取到对应通道号
+    int getIPChannelInfo(int hk_Device,const QString& ip);
    int SdkInit(QString &szConfigPath, QString &szTempPath);
    std::map<int,CameraHandle*>getCurrentDevice();
......
#ifndef PARKINGSPACEINFO_H
#define PARKINGSPACEINFO_H
#include "VidesData.h"
-#include "RecognitionInfo.h"
+#include "RecognizedInfo.h"
#include <QMutex>
#include <QQueue>
class ParkingSpaceInfo {
......
-#include "RecognitionInfo.h"
+#include "RecognizedInfo.h"
RecognizedInfo::RecognizedInfo(){
......
@@ -70,6 +70,7 @@ struct responseGb28181 {
    int heartbeat_interval;
    QString device_id;
    QString channel_id;
+    int sUdpPort;
};
struct responseArea {
    float bottom_right_corner_x;
@@ -288,7 +289,7 @@ struct LicensePlateConfig {
struct UniformConfig {
    bool isOn;
-    int uniformColor;
+    QString uniformColor;
    int humanDetectionLen;
    float carShapeConfidence;
    quint64 updateAt;
@@ -310,6 +311,14 @@ struct MqttConfig {
    QString password;
    quint64 updateAt;
};
+struct NvrConfig{
+    bool isOn;
+    QString ip;
+    QString username;
+    QString password;
+    uint16_t port;
+};
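For reference, the nvrConfig block that httpDeviceConfig now parses corresponds to a JSON fragment shaped roughly like the one embedded below; the field values are purely illustrative, not taken from any real device configuration:

```cpp
// Illustrative only: the JSON shape expected by the nvrConfig parsing above.
// All values here are made-up placeholders.
const char *sampleNvrConfigJson = R"({
    "nvrConfig": {
        "isOn": true,
        "ip": "192.168.1.200",
        "port": 8000,
        "username": "admin",
        "password": "secret"
    }
})";
```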
struct responseConfig {
    MainFormat mainFormat;
@@ -320,7 +329,7 @@ struct responseConfig {
    UniformConfig uniformConfig;
    Camera camera;
    MqttConfig mqttConfig;
+    NvrConfig nvrConfig;
    HumanConfig humanConfig;
};
@@ -342,7 +351,7 @@ struct DetectionParams {
    int newFaceLen;
    QString modelPaths;
    float humanCarShapeConfidence;
-    int uniformColor;
+    QString uniformColor;
    std::map<QString, QString> faceMaps;
    int numberFaces;
    float faceConfidence;
@@ -355,7 +364,6 @@ struct DetectionParams {
    float recConfidenceThreshold; ///< 识别置信度阈值
};
inline bool isVirtualMachine()
{
    QString dmiPath;
......
@@ -17,7 +17,9 @@ DEFINES += APP_VERSION=\\\"1.3.2\\\"
DEFINES += QT_MESSAGELOGCONTEXT
DEFINES += QT_NO_DEBUG_OUTPUT
-QMAKE_LIBDIR += /usr/local/lib
+# 设置库路径
+QMAKE_LIBDIR += /usr/local/lib \
+                /usr/local/lib/HCNetSDKCom
INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface
@@ -27,7 +29,7 @@ INCLUDEPATH+=/usr/local/include/human
INCLUDEPATH+=/usr/local/include/CImg
INCLUDEPATH+=/usr/local/include/mqtt
INCLUDEPATH+=/usr/local/include/stb_image
+INCLUDEPATH+=/usr/local/include/hkws
@@ -88,6 +90,7 @@ LIBS += -lopencv_core \
        -lopencv_objdetect \
        -lsohuman \
        -lpaho-mqtt3a \
+       -lhcnetsdk \
#       -lssl \
#       -lcrypto \ sudo apt-get install libjpeg-dev libpng-dev
        -lc \
@@ -100,7 +103,6 @@ SOURCES += \
    mainwindow.cpp \
    LicensePlateRecognition.cpp \
    MediaFaceImage.cpp \
-    RecognizedInfo.cpp \
    Httpclient.cpp \
    HttpService.cpp \
    TaskRunnable.cpp \
@@ -112,7 +114,8 @@ SOURCES += \
    AlgorithmTaskManage.cpp \
    BaseAlgorithm.cpp \
    MqttSubscriber.cpp \
-    NonConnectedCameraHandle.cpp
+    NonConnectedCameraHandle.cpp \
+    RecognizedInfo.cpp
HEADERS += \
    Common.h \
@@ -120,7 +123,6 @@ HEADERS += \
    mainwindow.h \
    LicensePlateRecognition.h \
    MediaFaceImage.h \
-    RecognitionInfo.h \
    HttpClient.h \
    HttpService.h \
    VidesData.h \
......
@@ -8,6 +8,7 @@
#include "MediaFaceImage.h"
#include "AlgorithmTaskManage.h"
#include "MqttSubscriber.h"
+#include "HCNetSDK.h"
#include <algorithm>
#include <QString>
#include <QTextCodec>
@@ -52,6 +53,10 @@ public:
    CameraHandle* findHandle(QString sn);
+    void realTimeUpdateDivConfig(QString &httpurl,QString &serialNumber);
+    bool getDeviceStatus(int UserId);
    void modifySnMapIp(QString &sn,QString &ip);
    void findSnMapIp(QString &sn,QString &ip);
@@ -86,6 +91,7 @@ public:
    void setIsResetting(bool running);
+    int loginNetDvr(QString ip,QString userName,QString passWord,uint16_t port);
    ~MainWindow();
signals:
@@ -127,5 +133,9 @@ private:
    std::atomic<bool> isResetting;
+    int hk_Device;
+    bool nvr_status;
};
#endif // MAINWINDOW_H