Commit b6009d23 by Amos

Merge branch 'release' into 'master'

merge Release

See merge request !102
parents 987a0177 ac65e986
......@@ -42,7 +42,7 @@ void AlgorithmTaskManage::initialize(int humanDetectionLen, int licensePlateLen,
}
void AlgorithmTaskManage::initHumanDetectionManage(const QString &modelPaths,
float carShapeConfidence,int &uniformColor) {
float carShapeConfidence,QString &uniformColor) {
for (int i = 0; i < humanDetectionLen; ++i) {
HumanDetection* human=new HumanDetection(modelPaths,carShapeConfidence);
human->setHuManParameter(uniformColor);
......@@ -113,7 +113,7 @@ void AlgorithmTaskManage::releaseResources(const vides_data::DetectionParams& pa
QString modelPath = params.modelPaths;
float humanCarShapeConfidence = params.humanCarShapeConfidence;
int uniformColor = params.uniformColor;
QString uniformColor = params.uniformColor;
std::map<QString, QString> faceMaps = params.faceMaps;
int numberFaces = params.numberFaces;
float faceConfidence = params.faceConfidence;
......
......@@ -26,7 +26,7 @@ public:
void initialize(int humanDetectionLen, int licensePlateLen, int faceLen,bool first,__uint8_t algorithmPermissions );
void initHumanDetectionManage(const QString &modelPaths,
float carShapeConfidence,int &uniformColor);
float carShapeConfidence,QString &uniformColor);
void initLicensePlateManage(const QString &modelPaths,bool is_high,int maxNum,bool useHalf,
float boxThreshold,float nmsThreshold,float recThreshold);
......
......@@ -217,15 +217,20 @@ int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int n
}
}
}
}
int CameraHandle::sdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener) {
return XSDK_DevSetAlarmListener(hDevice,bListener);
}
void CameraHandle::setHkDevice(bool hk_status,int hk_Device){
this->hk_Device=hk_Device;
this->hk_status=hk_status;
}
int CameraHandle::getChannel(){
return channel;
}
int CameraHandle::getHdevice() {
return hDevice;
}
......@@ -394,6 +399,10 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
{
qInfo() << "OnDevAlarmCallback[Dev:" << hObject << "][Event:" << szString << "]";
}
if(algorithmPermissions ==0x00){
return -1 ;
}
cv::Mat image;
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
qint64 currentTime= QDateTime::currentSecsSinceEpoch();
......@@ -401,9 +410,24 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
if (image.empty())
{
qInfo() << "Failed to read the image";
qInfo() << "FaceImageCallBack抓图失败,尝试调用FaceHkImageCallBack";
if(!hk_status){
qInfo() <<QString("SN(%1): callbackFunction通过IP获取到对应通道号失败").arg(sSn);
return -1;
}
int hk_channel= mediaFaceImage->getIPChannelInfo(hk_Device,QString(loginParam->sDevId));
if(hk_channel<0){
qInfo() <<QString("SN(%1): callbackFunction通过IP获取到对应通道号失败").arg(sSn);
return -1;
}
// 调用FaceHkImageCallBack抓图
mediaFaceImage->FaceHkImageCallBack(hk_Device, hk_channel, image, sSn);
// 再次检查抓图是否成功
if (image.empty()) {
qInfo() << "FaceHkImageCallBack抓图也失败";
return -1;
}
}
if (image.rows <= 0 || image.cols <= 0 || image.channels() <= 0) {
qInfo() << "图像尺寸或通道数不正确,需排查原因";
return -1;
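
The added branch above gives callbackFunction a second capture path: when the camera's own FaceImageCallBack yields an empty frame and an NVR login is available (hk_status), it resolves the camera's channel on the NVR from its IP and retries the snapshot through FaceHkImageCallBack. Factored out of the surrounding diff, the control flow looks roughly like the sketch below; the std::function parameters stand in for the SDK-backed calls and are not part of the project code.

    #include <opencv2/core.hpp>
    #include <functional>
    #include <cstdio>

    // Try the camera's own snapshot first; if it yields nothing and an NVR handle is
    // available, look up the channel for this camera's IP and snapshot through the NVR.
    static bool captureWithNvrFallback(cv::Mat &image,
                                       bool nvrAvailable,
                                       const std::function<void(cv::Mat&)> &captureFromCamera,
                                       const std::function<int()> &resolveNvrChannel,
                                       const std::function<void(int, cv::Mat&)> &captureFromNvr)
    {
        captureFromCamera(image);
        if (!image.empty())
            return true;                   // primary path succeeded
        if (!nvrAvailable)
            return false;                  // no NVR login to fall back to
        const int channel = resolveNvrChannel();
        if (channel < 0)
            return false;                  // IP not found among the NVR's digital channels
        captureFromNvr(channel, image);
        return !image.empty();             // success only if the NVR snapshot produced a frame
    }

    int main()
    {
        cv::Mat frame;
        const bool ok = captureWithNvrFallback(
            frame, /*nvrAvailable=*/true,
            [](cv::Mat &) { /* pretend the camera snapshot failed: leave the Mat empty */ },
            []() { return 3; },                                   // pretend the IP maps to channel 3
            [](int, cv::Mat &m) { m = cv::Mat(2, 2, CV_8UC1); }); // pretend the NVR snapshot worked
        std::printf("capture %s\n", ok ? "succeeded" : "failed");
        return 0;
    }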
......@@ -412,25 +436,8 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
updateImage(image, currentTime);
}
void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
if(hDevice<=0){
qInfo() << QString("SN(%1): 相机断线").arg(sSn);
return;
}
if (!semaphore.tryAcquire()) {
qInfo() << QString("SN(%1): callbackFunction:正在执行线程").arg(sSn);;
return ;
}
ScopeSemaphoreExit guard([this]() {
semaphore.release(); // 释放信号量
});
bool CameraHandle::threeConsecutiveImage(){
Common & instace= Common::getInstance();
cv::Mat image;
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
qint64 currentTime= QDateTime::currentSecsSinceEpoch();
int ret=mediaFaceImage->FaceImageCallBack(hDevice,nChannel, image,sSn);
if (ret < 0) {
offlineCount++; // 累加计数器
if (offlineCount >= 3) { // 判断是否连续3次返回0
qInfo() <<QString("SN(%1): 设备离线").arg(sSn);
......@@ -448,11 +455,51 @@ void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
// TODO: 可以在此处更新设备状态、发送告警通知等
// 重置计数器,以便下次再次检测连续离线
offlineCount = 0;
return true;
}
return false;
}
void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
if(hDevice<=0){
qInfo() << QString("SN(%1): 相机断线").arg(sSn);
return;
}
if (!semaphore.tryAcquire()) {
qInfo() << QString("SN(%1): callbackFunction:正在执行线程").arg(sSn);
return ;
}
ScopeSemaphoreExit guard([this]() {
semaphore.release(); // 释放信号量
});
if(algorithmPermissions ==0x00){
return ;
}
cv::Mat image;
MediaFaceImage *mediaFaceImage= MediaFaceImage::getInstance();
qint64 currentTime= QDateTime::currentSecsSinceEpoch();
int ret=mediaFaceImage->FaceImageCallBack(hDevice,nChannel, image,sSn);
if (ret <= 0) {
if(!hk_status){
if(threeConsecutiveImage())return;
}else {
int hk_channel= mediaFaceImage->getIPChannelInfo(hk_Device,QString(loginParam->sDevId));
if(hk_channel<0){
qInfo() <<QString("SN(%1): NVR=>sdkDevSnapSyn 通过IP获取到对应通道号失败").arg(sSn);
return ;
}
int retHk = mediaFaceImage->FaceHkImageCallBack(hk_Device, hk_channel, image, sSn);
if(retHk<0){
qInfo() <<QString("SN(%1): NVR=>FaceHkImageCallBack 句柄获取图片失败").arg(sSn);
if(threeConsecutiveImage())return;
}else {
// NVR抓图成功,则重置计数器
offlineCount = 0;
}
}
} else {
// 如果不连续,则重置计数器
//第一次相机抓图成功,重置计数器
offlineCount = 0;
}
if (image.empty())
......@@ -462,7 +509,6 @@ void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
}
if (image.rows <= 0 || image.cols <= 0 || image.channels() <= 0) {
qInfo()<<QString("SN(%1): 图像尺寸或通道数不正确,需排查原因").arg(sSn);
return ;
}
updateImage(image, currentTime);
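
threeConsecutiveImage() pulls the old inline logic into a reusable check: each failed snapshot increments offlineCount, only the third consecutive failure is treated as "device offline", and the counter resets afterwards; a successful capture also resets it. The pattern in isolation looks like the sketch below — the threshold of 3 mirrors the diff, while the class itself is purely illustrative.

    #include <cstdio>

    // Report a fault only after `threshold` consecutive failures, then start counting again.
    class ConsecutiveFailureGate {
    public:
        explicit ConsecutiveFailureGate(int threshold) : threshold_(threshold) {}

        // Returns true exactly when the failure streak reaches the threshold.
        bool recordFailure()
        {
            if (++count_ >= threshold_) {
                count_ = 0;          // reset so the next streak is counted from zero
                return true;
            }
            return false;
        }

        void recordSuccess() { count_ = 0; }

    private:
        int threshold_;
        int count_ = 0;
    };

    int main()
    {
        ConsecutiveFailureGate gate(3);      // same threshold as offlineCount >= 3 in the diff
        for (int i = 1; i <= 4; ++i) {
            const bool offline = gate.recordFailure();
            std::printf("failure %d -> offline=%d\n", i, offline);   // offline only on the 3rd
        }
        return 0;
    }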
......@@ -474,6 +520,7 @@ void CameraHandle::matToBase64(const cv::Mat &image, QByteArray &base64Data) {
cv::imencode(".jpg", image, buffer, params);
base64Data = QByteArray(reinterpret_cast<const char*>(buffer.data()), buffer.size()).toBase64();
}
void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat &frame, RecognizedInfo& newInfo,
int &result){
if (newInfo.getLicensePlate() != park->getCurrentPlate().getLicensePlate()) {
......@@ -511,7 +558,7 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
}else {
// 没有车辆或车辆在停车区域内部,移除队列
park->removeNoQueue();
qDebug()<<QString("SN(%1): no出场::%2").arg(sSn).arg(car_size);
qInfo()<<QString("SN(%1): no出场::%2").arg(sSn).arg(car_size);
}
}else{
//当前不为空,新车,新车入场,老车出场
......@@ -750,7 +797,6 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
if(image_save==1){
QString fileName= instace.getVideoOut().append(instace.getTimeString()+".jpg");
bool success = cv::imwrite(fileName.toStdString(), frame);
if (success) {
qInfo() << "图片已成功保存至:" << fileName;
} else {
......@@ -1164,7 +1210,7 @@ void CameraHandle::initWorkSpVMn(vides_data::responseGb28181 *gb28181, QString &
toJsonObject["uiAlarmStateLoseEnable"] = "0xFFFFFFFF";
toJsonObject["uiAlarmStateMotionEnable"] = "0xFFFFFFFF";
toJsonObject["uiAlarmStatePerformanceEnable"] = "0xFFFFFFFF";
toJsonObject["sUdpPort"] = 5060;
toJsonObject["sUdpPort"] = gb28181->sUdpPort;
// 生成 workSpWMn
QJsonDocument doc(toJsonObject);
......@@ -1312,6 +1358,7 @@ void CameraHandle::updateSdkDevSpvMn(vides_data::responseGb28181 *gb28181){
const char* szDeviceNO = config.szDeviceNO.ToString();
const char* szServerDn = config.szServerDn.ToString();
const char* szServerNo = config.szServerNo.ToString();
int u_port=config.sUdpPort.ToInt();
bool isEqual = (szCsIP == gb28181->sip_ip &&
sCsPort == gb28181->sip_port &&
szServerNo == gb28181->serial &&
......@@ -1320,6 +1367,7 @@ void CameraHandle::updateSdkDevSpvMn(vides_data::responseGb28181 *gb28181){
iHsIntervalTime == gb28181->heartbeat_interval &&
szConnPass == gb28181->password &&
szDeviceNO == gb28181->device_id &&
u_port == gb28181->sUdpPort &&
Camreaid == gb28181->channel_id);
if(!isEqual){
config.Camreaid.InitArraySize(64);
......@@ -1347,7 +1395,7 @@ void CameraHandle::updateSdkDevSpvMn(vides_data::responseGb28181 *gb28181){
config.szServerNo.SetValue(sz_ServerNo);
config.sCsPort.SetValue(gb28181->sip_port);
config.sUdpPort.SetValue(5060);
config.sUdpPort.SetValue(gb28181->sUdpPort);
QByteArray && bSzServerDn=gb28181->realm.toUtf8();
char* sz_ServerDn = bSzServerDn.data();
......
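
Both initWorkSpVMn and updateSdkDevSpvMn now take the SIP UDP port from gb28181->sUdpPort instead of the hard-coded 5060, and the port is included in the change-detection comparison. If the platform can ever return 0 or an out-of-range value for this field, a defensive fallback to the previous default might look like the following sketch (purely illustrative; 5060 is simply the value the code used before).

    #include <cstdio>

    // Pick the configured SIP UDP port, falling back to the previous hard-coded default
    // when the configured value is missing (0) or outside the valid port range.
    static int effectiveUdpPort(int configuredPort, int fallback = 5060)
    {
        return (configuredPort > 0 && configuredPort <= 65535) ? configuredPort : fallback;
    }

    int main()
    {
        std::printf("%d\n", effectiveUdpPort(5080));  // 5080 - configured value wins
        std::printf("%d\n", effectiveUdpPort(0));     // 5060 - fallback
        return 0;
    }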
#ifndef CAMERAHANDLE_H
#define CAMERAHANDLE_H
#include "RecognitionInfo.h"
#include "RecognizedInfo.h"
#include "FaceReconitionHandle.h"
#include "HttpService.h"
#include "LicensePlateRecognition.h"
......@@ -50,9 +50,9 @@ public:
~CameraHandle();
int sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
//int SdkMediaGetFaceImage(int hDevice, int nSeq, int nTimeout);
int sdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener);
void setHkDevice(bool hk_status,int hk_Device);
int getHdevice();
int getChannel();
......@@ -83,6 +83,8 @@ public:
void licensePlateRecognitionResults(vides_data::requestLicensePlate &location);
bool threeConsecutiveImage();
void sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel);
void printWifi(XSDK_HANDLE hDevice,XSDK_CFG::NetWork_Wifi &cfg);
......@@ -185,7 +187,10 @@ private slots:
private :
int hDevice;
int hk_Device;
bool hk_status;
int channel;
QString httpUrl;
SXSDKLoginParam *loginParam;
......
......@@ -51,7 +51,7 @@ cv::Mat FaceReconitionHandle::loadImage(const QString &path) {
qInfo() << "图像以CImg成功加载。";
return image;
}
image=loadImageWithSTB(path);
image=loadImageWithStb(path);
if (!image.empty()) {
qInfo() << "图像以stb_image成功加载。";
return image;
......@@ -204,7 +204,7 @@ void FaceReconitionHandle::featureRemoveMap(std::map<QString, QString> &maps){
}
cv::Mat FaceReconitionHandle::loadImageWithSTB(const QString& filename) {
cv::Mat FaceReconitionHandle::loadImageWithStb(const QString& filename) {
int width, height, channels;
QByteArray && b_filename = filename.toUtf8();
char* c_filename = b_filename.data();
......
......@@ -32,7 +32,7 @@ public:
cv::Mat loadQImageFromByteStream(const QString& filePath);
cv::Mat loadImageWithSTB(const QString& filename);
cv::Mat loadImageWithStb(const QString& filename);
bool getImageChanged()const;
......
#include "HttpService.h"
vides_data::responseStsCredentials HttpService::stsCredentials;
QString HttpService::sing_key;
HttpService::HttpService() {
......@@ -45,6 +46,7 @@ vides_data::response* HttpService::httpPostDeviceStatus(vides_data::requestDevic
vides_data::response *resp = new vides_data::response();
QNetworkRequest request;
assembleSingHeaders(request);
request.setUrl(QUrl(httpUrl));
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -78,6 +80,7 @@ vides_data::response* HttpService::httpPostRecord(int id,int recongnition_type,Q
vides_data::response *resp=new vides_data::response();
QNetworkRequest request;
assembleSingHeaders(request);
request.setUrl(QUrl(httpUrl));
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -99,6 +102,15 @@ vides_data::response* HttpService::httpPostRecord(int id,int recongnition_type,Q
void HttpService::setHttpUrl(const QString &httpUrl){
this->httpUrl=httpUrl;
}
void HttpService::setSingKey(const QString &key) {
sing_key = key;
}
QString HttpService::getSingKey() {
return sing_key;
}
vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_data::responseDeviceData&responseData) {
httpUrl.append("/api/v1.0/device/all");
vides_data::response *resp=new vides_data::response();
......@@ -106,6 +118,7 @@ vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_d
query.addQueryItem("sn",serialNumber);
query.addQueryItem("new_token",QString::number(1));
QNetworkRequest request;
assembleSingHeaders(request);
QUrl url(httpUrl);
url.setQuery(query);
request.setUrl(url);
......@@ -222,6 +235,7 @@ vides_data::response *HttpService::httpLicensePlateRecognition(vides_data::reque
QByteArray bytearr= doc.toJson(QJsonDocument::Compact);
vides_data::response *resp=new vides_data::response();
assembleSingHeaders(request);
request.setUrl(QUrl(httpUrl));
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -264,6 +278,7 @@ vides_data::response* HttpService::httpFindFaceReconition(QString &serialNumber,
query.addQueryItem("sn",serialNumber);
QUrl url(httpUrl);
url.setQuery(query);
assembleSingHeaders(request);
request.setUrl(url);
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -305,6 +320,7 @@ vides_data::response *HttpService::httpPostUniforms(QByteArray &img,QString &id,
vides_data::response *resp=new vides_data::response();
QNetworkRequest request;
assembleSingHeaders(request);
request.setUrl(QUrl(httpUrl));
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -346,6 +362,7 @@ vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &h
vides_data::response *resp=new vides_data::response();
QNetworkRequest request;
assembleSingHeaders(request);
request.setUrl(QUrl(httpUrl));
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -394,6 +411,7 @@ vides_data::response *HttpService::httpPostFaceReconition(vides_data::requestFac
vides_data::response *resp=new vides_data::response();
QNetworkRequest request;
assembleSingHeaders(request);
request.setUrl(QUrl(httpUrl));
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
......@@ -419,6 +437,7 @@ vides_data::response*HttpService::httpFindGb28181Config(QString &serialNumber){
QNetworkRequest request;
QUrl url(httpUrl);
url.setQuery(query);
assembleSingHeaders(request);
request.setUrl(url);
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -442,6 +461,8 @@ vides_data::response*HttpService::httpFindGb28181Config(QString &serialNumber){
response->heartbeat_interval = data["heartbeat_interval"].toInt();
response->device_id = data["device_id"].toString();
response->channel_id = data["channel_id"].toString();
QString local_port= data["local_port"].toString();
response->sUdpPort=local_port.toInt();
resp->data=response;
resp->msg=map["message"].toString();
}else{
......@@ -460,6 +481,7 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
QNetworkRequest request;
QUrl url(httpUrl);
url.setQuery(query);
assembleSingHeaders(request);
request.setUrl(url);
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -531,7 +553,7 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
// 解析 uniformConfig
QJsonObject uniformConfigObj = dataObj["uniformConfig"].toObject();
config.uniformConfig.isOn = uniformConfigObj["isOn"].toBool();
config.uniformConfig.uniformColor = uniformConfigObj["uniformColor"].toInt();
config.uniformConfig.uniformColor = uniformConfigObj["uniformColor"].toString();
config.uniformConfig.humanDetectionLen = uniformConfigObj["humanDetectionLen"].toInt();
config.uniformConfig.updateAt = uniformConfigObj["updateAt"].toVariant().toULongLong();
config.uniformConfig.carShapeConfidence = uniformConfigObj["carShapeConfidence"].toVariant().toFloat();
......@@ -540,6 +562,13 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
config.humanConfig.isOn=humanConfigObj["isOn"].toBool();
config.humanConfig.updateAt = humanConfigObj["updateAt"].toVariant().toULongLong();
config.humanConfig.faceFrequency = humanConfigObj["faceFrequency"].toVariant().toUInt();
//解析nvrConfig;
QJsonObject nvrConfigObj= dataObj["nvrConfig"].toObject();
config.nvrConfig.isOn=nvrConfigObj["isOn"].toBool();
config.nvrConfig.ip=nvrConfigObj["ip"].toString();
config.nvrConfig.port=(unsigned short) nvrConfigObj["port"].toInt();
config.nvrConfig.password=nvrConfigObj["password"].toString();
config.nvrConfig.username=nvrConfigObj["username"].toString();
// 解析 devicesConfig
QJsonObject devicesConfigObj = dataObj["camera"].toObject();
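
The new nvrConfig block and the local_port field in httpFindGb28181Config both read numbers out of QJson objects, and local_port in particular is read with toString().toInt(), which yields 0 if the server ever sends the value as a JSON number rather than a string. A small helper that tolerates either representation is sketched below; the field names mirror the hunks, while the defaults (8000 and 5060) are assumptions taken from values that appear elsewhere in this diff.

    #include <QJsonObject>
    #include <QJsonValue>
    #include <QDebug>

    // Read an integer field that may arrive either as a JSON number or as a string,
    // falling back to defaultValue when it is missing or unparsable.
    static int jsonToInt(const QJsonValue &value, int defaultValue)
    {
        if (value.isDouble())
            return value.toInt(defaultValue);
        if (value.isString()) {
            bool ok = false;
            const int parsed = value.toString().toInt(&ok);
            return ok ? parsed : defaultValue;
        }
        return defaultValue;
    }

    int main()
    {
        QJsonObject nvr;                              // stand-in for dataObj["nvrConfig"].toObject()
        nvr.insert("port", 8000);                     // number form
        QJsonObject gb;                               // stand-in for the gb28181 "data" object
        gb.insert("local_port", QStringLiteral("5060"));  // string form, as the current backend sends it
        qDebug() << "nvr port:" << jsonToInt(nvr.value("port"), 8000);
        qDebug() << "sip local port:" << jsonToInt(gb.value("local_port"), 5060);
        return 0;
    }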
......@@ -573,6 +602,23 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
}
void HttpService::assembleSingHeaders(QNetworkRequest &reques){
QString ts = QString::number(QDateTime::currentMSecsSinceEpoch() / 1000);
// 计算签名
QCryptographicHash hash(QCryptographicHash::Md5);
hash.addData(HttpService::getSingKey().toUtf8());
hash.addData(ts.toUtf8());
QString sign = hash.result().toHex();
// 设置头部信息
QMap<QString, QVariant> headers;
headers.insert("ts", ts);
headers.insert("sign", sign);
for (auto it = headers.begin(); it != headers.end(); ++it) {
reques.setRawHeader(it.key().toLatin1(), it.value().toString().toLatin1());
}
}
vides_data::response*HttpService::httpFindStream(QString &serialNumber){
httpUrl.append("/api/v1.0/stream");
......
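
The new assembleSingHeaders() authenticates every request by attaching a "ts" header (Unix seconds) and a "sign" header computed as the MD5 hex digest of the signing key followed by the timestamp. A minimal standalone sketch of the same computation is below, e.g. for reproducing or verifying the signature on the receiving side; the header names and the key-then-timestamp hashing order come from the hunk above, everything else (key value, verification comment) is illustrative.

    #include <QCryptographicHash>
    #include <QDateTime>
    #include <QString>
    #include <QDebug>

    // Compute the request signature the same way assembleSingHeaders does:
    // MD5 over (key bytes followed by timestamp bytes), rendered as lowercase hex.
    static QString computeSign(const QString &signKey, const QString &ts)
    {
        QCryptographicHash hash(QCryptographicHash::Md5);
        hash.addData(signKey.toUtf8());
        hash.addData(ts.toUtf8());
        return QString::fromLatin1(hash.result().toHex());
    }

    int main()
    {
        const QString signKey = QStringLiteral("example_sign_key");   // placeholder, not a real key
        const QString ts = QString::number(QDateTime::currentMSecsSinceEpoch() / 1000);
        const QString sign = computeSign(signKey, ts);
        qDebug() << "ts =" << ts << "sign =" << sign;
        // A receiver that knows signKey can recompute computeSign(signKey, receivedTs)
        // and compare it with the received "sign" header (plus a freshness check on ts).
        return 0;
    }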
......@@ -49,7 +49,13 @@ public:
vides_data::response *httpUploadFile(const QString &filePath,QString& accessKeyId,QString& accessKeySecret,
QString & bucketName,QString &securityToken);
void setHttpUrl(const QString & httpUrl);
static void setSingKey(const QString &key);
static QString getSingKey();
//组装验证头
void assembleSingHeaders(QNetworkRequest &reques);
vides_data::response *httpDeviceConfig(const QString &serialNumber,vides_data::responseConfig &config);
......@@ -63,5 +69,6 @@ private:
QMutex m_httpClientMutex;
static QString sing_key;
};
#endif // HTTPSERVICE_H
......@@ -72,7 +72,7 @@ void HumanDetection::draw_human_on_image(const cv::Mat& image, const TCV_ObjectL
}
}
void HumanDetection::setHuManParameter(int &uniformColor){
void HumanDetection::setHuManParameter(QString &uniformColor){
this->uniformColor=uniformColor;
}
......@@ -98,23 +98,38 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
int num = 0;
if (res == 0x00 || res == 0x02) {
QStringList colorList = uniformColor.split(',');
num = TCV_HumanDetectorGetNumOfHuman(detector);
qInfo() << QString("SN(%1): 获取人形数量:%2").arg(sSn).arg(num);
if (num == 0) return num; // 无行人检测结果,提前返回
if(res==0x02 && colorList.size()==0){
reMap[0x02] = 0; // 未穿工服的行人数量
reMap[0x00] = num; // 所有满足条件的行人数量
num = 0;
return num;
}
int count_no_uniform = 0; // 未穿工服的行人数量
int count_all = 0; // 所有满足条件的行人数量
std::vector<TCV_ObjectLocation> results(num);
TCV_HumanDetectorGetHumanLocation(detector, results.data(), num);
int count_no_uniform = 0; // 未穿工服的行人数量
int count_all = 0; // 所有满足条件的行人数量
QSet<int> uniformNumbers;
for (const QString &numStr : colorList) {
bool ok;
int num = numStr.toInt(&ok);
if (ok) {
uniformNumbers.insert(num);
}
}
for (const auto &person : results) {
int tenPlace = uniformColor / 10; // 十位
int onePlace = uniformColor % 10; // 个位
if (std::abs(person.y2 - person.y1) >= heightReference) {
++count_all;
//工服
if(person.uniform != tenPlace && person.uniform != onePlace){
if(!uniformNumbers.contains(person.uniform)){
vides_data::ParkingArea area;
area.topLeftCornerX=person.x1;
area.topLeftCornerY=person.y1;
......
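
Previously uniformColor was a single int decoded into a tens digit and a ones digit, so at most two uniform colour codes could be configured; the hunk above replaces it with a comma-separated QString parsed into a QSet<int>, allowing any number of accepted colours. A minimal sketch of that parsing, assuming values such as "3,5" (the delimiter and the QSet come from the hunk, the sample values are made up):

    #include <QSet>
    #include <QString>
    #include <QStringList>
    #include <QDebug>

    // Parse "3,5,7" into {3, 5, 7}; entries that are not valid integers are skipped.
    static QSet<int> parseUniformColors(const QString &uniformColor)
    {
        QSet<int> colors;
        const QStringList parts = uniformColor.split(',');
        for (const QString &part : parts) {
            bool ok = false;
            const int value = part.trimmed().toInt(&ok);
            if (ok)
                colors.insert(value);
        }
        return colors;
    }

    int main()
    {
        const QSet<int> colors = parseUniformColors(QStringLiteral("3,5"));
        qDebug() << "accepted uniform colors:" << colors.values();
        qDebug() << "uniform 4 accepted?" << colors.contains(4);    // false
        return 0;
    }

One caveat that may be worth double-checking in the hunk: QString::split(',') on an empty string returns a list containing a single empty element, so the colorList.size()==0 guard never fires; passing Qt::SkipEmptyParts (QString::SkipEmptyParts on older Qt) or testing uniformColor.isEmpty() first would make the "no colours configured" branch reachable.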
......@@ -19,7 +19,7 @@ public:
int findHuManCar(const cv::Mat &source,int res,std::map<int,int>&reMap,QString &sSn,
float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate);
void setHuManParameter(int &uniformColor);
void setHuManParameter(QString &uniformColor);
void draw_human_on_image(const cv::Mat& image, const TCV_ObjectLocation* boxes, int size);
......@@ -27,7 +27,7 @@ private:
//高度基准
float heightReference;
int uniformColor;
QString uniformColor;
TCV_HumanDetector *detector;
......
......@@ -117,6 +117,21 @@ int MediaFaceImage::SdkSearchDevicesSyn(std::map<QString, vides_data::localDevic
}
int MediaFaceImage::SdkIpcSearchDevicesSyn( int lUserID, NET_DVR_IPPARACFG_V40 *reqParams){
int iRet = -1;
DWORD uiReturnLen = 0;
LONG lChannel = 0;
//get
iRet = NET_DVR_GetDVRConfig(lUserID, NET_DVR_GET_IPPARACFG_V40, lChannel, \
reqParams, sizeof(NET_DVR_IPPARACFG_V40), &uiReturnLen);
if (!iRet){
qInfo() <<QString("pyd---NET_DVR_GetDVRConfig NET_DVR_GET_IPPARACFG_V40 error.%1\n").arg(NET_DVR_GetLastError());
return -1;
}
return 0;
}
int MediaFaceImage::SdkInit(QString &szConfigPath, QString &szTempPath) {
SXSDKInitParam *pParam=new SXSDKInitParam();
pParam->nLogLevel=8;
......@@ -235,7 +250,7 @@ int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat
const int BufferSize = 1024 * 1024 * 2; // 缓冲区大小
image.release(); // 释放之前的图像
std::unique_ptr<unsigned char[]> pOutBuffer(new unsigned char[BufferSize]); // 智能指针管理内存
int pInOutBufferSize = 0;
int pInOutBufferSize = -1;
int ret = XSDK_DevSnapSyn(hMedia, nChannel, "", pOutBuffer.get(), &pInOutBufferSize);
if (ret < 0 || pInOutBufferSize <= 0) {
......@@ -262,6 +277,71 @@ int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat
return pInOutBufferSize;
}
int MediaFaceImage::getIPChannelInfo(int hk_Device,const QString& ip){
NET_DVR_IPPARACFG_V40 m_strIpparaCfg = {0};
m_strIpparaCfg.dwSize = sizeof(m_strIpparaCfg);
int bRet =SdkIpcSearchDevicesSyn(hk_Device, &m_strIpparaCfg);
if (bRet<0) {
qInfo() << "获取IP接入配置参数失败,错误码:" << NET_DVR_GetLastError();
return -1;
}
for (int iChannum = 0; iChannum < m_strIpparaCfg.dwDChanNum; ++iChannum) {
if (m_strIpparaCfg.struStreamMode[iChannum].byGetStreamType == 0) {
int channel = m_strIpparaCfg.struStreamMode[iChannum].uGetStream.struChanInfo.byIPID +
(m_strIpparaCfg.struStreamMode[iChannum].uGetStream.struChanInfo.byIPIDHigh * 256);
if (channel > 0 && m_strIpparaCfg.struStreamMode[iChannum].uGetStream.struChanInfo.byEnable == 1) {
char ip_addr[16] = {0};
strncpy(ip_addr, m_strIpparaCfg.struIPDevInfo[channel - 1].struIP.sIpV4, sizeof(ip_addr) - 1);
QString key = QString::fromUtf8(ip_addr).trimmed();
qInfo() << "匹配的IP:" << key << ",通道号:" << channel;
if (key == ip) {
qInfo() << "找到匹配的IP:" << key << ",通道号:" << channel;
return channel; // 返回找到的第一个匹配的通道号
}
}
}
}
qInfo() << "未找到匹配的IP:" << ip;
return -1; // 没有找到匹配的IP地址
}
int MediaFaceImage::FaceHkImageCallBack(int userID,int channelID,cv::Mat &image,QString &sSn){
image.release();
NET_DVR_JPEGPARA jpegpara = {0};
jpegpara.wPicQuality = 0;
jpegpara.wPicSize = 0xff; // 自动选择图片大小
const size_t M1 = 8 << 20; // 8 MB 缓冲区大小
std::unique_ptr<char[]> buffer(new char[M1]); // 使用智能指针管理缓冲区
unsigned int imageLen = 0;
bool result = NET_DVR_CaptureJPEGPicture_NEW(userID, 32 + channelID, &jpegpara,
buffer.get(), M1, &imageLen);
if (!result || imageLen == 0) {
qInfo() << "抓图失败, 错误码:" << NET_DVR_GetLastError();
return -1;
}
// 使用 std::vector 管理缓冲区数据
std::vector<uchar> imageBuffer(imageLen);
memcpy(imageBuffer.data(), buffer.get(), imageLen);
try {
// 解码 JPEG 数据为 cv::Mat
cv::Mat decodedImage = cv::imdecode(imageBuffer, cv::IMREAD_UNCHANGED);
if (decodedImage.empty()) {
qInfo() << QString("SN(%1): 图像解码失败").arg(sSn);
return -1;
}
// 转移解码后的图像到输出参数
image = std::move(decodedImage);
} catch (const cv::Exception &e) {
qInfo() << QString("SN(%1): 图像解码过程中捕获异常:%2").arg(sSn).arg(e.what());
return -1;
}
return imageLen; // 返回图像数据长度
}
int MediaFaceImage::CameraImage(XSDK_HANDLE hMedia,int nChannel,std::vector<uchar> &buffer){
static const int BufferSize = 1024 * 1024 * 2; // 2MB buffer size
static unsigned char pOutBuffer[BufferSize];
......
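
FaceHkImageCallBack above turns the JPEG bytes returned by NET_DVR_CaptureJPEGPicture_NEW into a cv::Mat with cv::imdecode and then validates the result. The same decode-and-validate step in isolation looks roughly like this; the buffer contents are illustrative, and IMREAD_UNCHANGED is the flag the hunk uses.

    #include <opencv2/imgcodecs.hpp>
    #include <opencv2/core.hpp>
    #include <vector>
    #include <cstdio>

    // Decode an in-memory JPEG/PNG buffer into a cv::Mat; returns an empty Mat on failure.
    static cv::Mat decodeImageBuffer(const std::vector<uchar> &buffer)
    {
        if (buffer.empty())
            return cv::Mat();
        cv::Mat image = cv::imdecode(buffer, cv::IMREAD_UNCHANGED);
        if (image.empty() || image.rows <= 0 || image.cols <= 0)
            return cv::Mat();              // decode failed or produced a degenerate image
        return image;
    }

    int main()
    {
        std::vector<uchar> jpegBytes;      // would normally be filled by the capture call
        cv::Mat image = decodeImageBuffer(jpegBytes);
        std::printf("decoded: %s (%dx%d)\n",
                    image.empty() ? "no" : "yes", image.cols, image.rows);
        return 0;
    }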
......@@ -6,6 +6,7 @@
#include "VidesData.h"
#include "Common.h"
#include "TaskRunnable.h"
#include "HCNetSDK.h"
#include <memory>
#include <map>
#include <QDebug>
......@@ -19,6 +20,9 @@ public:
void ParserImageData(const unsigned char* pData, int nDataLen, char* pJpg, int* nJpgLen, char* pJson);
int FaceImageCallBack(XSDK_HANDLE hMedia,int nChannel,cv::Mat &image,QString &sSn);
int FaceHkImageCallBack(int userID,int channelID,cv::Mat &image,QString &sSn);
int CameraImage(XSDK_HANDLE hMedia,int nChannel,std::vector<uchar> &buffer);
int ToFile(const char* pFileName, const void* pData, int nLenght);
......@@ -27,6 +31,10 @@ public:
int SdkSearchDevicesSyn(std::map< QString,vides_data::localDeviceStatus*>& devices);
int SdkIpcSearchDevicesSyn(int lUserID, NET_DVR_IPPARACFG_V40 *reqParams);
//通过IP获取到对应通道号
int getIPChannelInfo(int hk_Device,const QString& ip);
int SdkInit(QString &szConfigPath, QString &szTempPath);
std::map<int,CameraHandle*>getCurrentDevice();
......
#ifndef PARKINGSPACEINFO_H
#define PARKINGSPACEINFO_H
#include "VidesData.h"
#include "RecognitionInfo.h"
#include "RecognizedInfo.h"
#include <QMutex>
#include <QQueue>
class ParkingSpaceInfo {
......
#include "RecognitionInfo.h"
#include "RecognizedInfo.h"
RecognizedInfo::RecognizedInfo(){
......
......@@ -70,6 +70,7 @@ struct responseGb28181 {
int heartbeat_interval;
QString device_id;
QString channel_id;
int sUdpPort;
};
struct responseArea {
float bottom_right_corner_x;
......@@ -288,7 +289,7 @@ struct LicensePlateConfig {
struct UniformConfig {
bool isOn;
int uniformColor;
QString uniformColor;
int humanDetectionLen;
float carShapeConfidence;
quint64 updateAt;
......@@ -310,6 +311,14 @@ struct MqttConfig {
QString password;
quint64 updateAt;
};
struct NvrConfig{
bool isOn;
QString ip;
QString username;
QString password;
uint16_t port;
};
struct responseConfig {
MainFormat mainFormat;
......@@ -320,7 +329,7 @@ struct responseConfig {
UniformConfig uniformConfig;
Camera camera;
MqttConfig mqttConfig;
NvrConfig nvrConfig;
HumanConfig humanConfig;
};
......@@ -342,7 +351,7 @@ struct DetectionParams {
int newFaceLen;
QString modelPaths;
float humanCarShapeConfidence;
int uniformColor;
QString uniformColor;
std::map<QString, QString> faceMaps;
int numberFaces;
float faceConfidence;
......@@ -355,7 +364,6 @@ struct DetectionParams {
float recConfidenceThreshold; ///< 识别置信度阈值
};
inline bool isVirtualMachine()
{
QString dmiPath;
......
......@@ -17,7 +17,9 @@ DEFINES += APP_VERSION=\\\"1.3.2\\\"
DEFINES += QT_MESSAGELOGCONTEXT
DEFINES += QT_NO_DEBUG_OUTPUT
QMAKE_LIBDIR += /usr/local/lib
# 设置库路径
QMAKE_LIBDIR += /usr/local/lib \
/usr/local/lib/HCNetSDKCom
INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface
......@@ -27,7 +29,7 @@ INCLUDEPATH+=/usr/local/include/human
INCLUDEPATH+=/usr/local/include/CImg
INCLUDEPATH+=/usr/local/include/mqtt
INCLUDEPATH+=/usr/local/include/stb_image
INCLUDEPATH+=/usr/local/include/hkws
......@@ -88,6 +90,7 @@ LIBS += -lopencv_core \
-lopencv_objdetect \
-lsohuman \
-lpaho-mqtt3a \
-lhcnetsdk \
# -lssl \
# -lcrypto \ sudo apt-get install libjpeg-dev libpng-dev
-lc \
......@@ -100,7 +103,6 @@ SOURCES += \
mainwindow.cpp \
LicensePlateRecognition.cpp \
MediaFaceImage.cpp \
RecognizedInfo.cpp \
Httpclient.cpp \
HttpService.cpp \
TaskRunnable.cpp \
......@@ -112,7 +114,8 @@ SOURCES += \
AlgorithmTaskManage.cpp \
BaseAlgorithm.cpp \
MqttSubscriber.cpp \
NonConnectedCameraHandle.cpp
NonConnectedCameraHandle.cpp \
RecognizedInfo.cpp
HEADERS += \
Common.h \
......@@ -120,7 +123,6 @@ HEADERS += \
mainwindow.h \
LicensePlateRecognition.h \
MediaFaceImage.h \
RecognitionInfo.h \
HttpClient.h \
HttpService.h \
VidesData.h \
......
......@@ -7,8 +7,6 @@ MainWindow::MainWindow():isResetting(false)
{
sp_this=this;
LogHandler::Get().installMessageHandler();
QString inifile=QCoreApplication::applicationDirPath()+"/gameras.ini";
......@@ -19,7 +17,7 @@ MainWindow::MainWindow():isResetting(false)
QThreadPool* threadPool = QThreadPool::globalInstance();
threadPool->setMaxThreadCount(12);
threadPool->setMaxThreadCount(18);
QString httpurl;
QString profile=qSetting->value("cloudservice/profile","test").toString();
......@@ -30,23 +28,9 @@ MainWindow::MainWindow():isResetting(false)
}else{
httpurl=qSetting->value("cloudservice/pro_http").toString();
}
Common & instace= Common::getInstance();
QString serialNumber;
findLocalSerialNumber(serialNumber);
bool configFetched = false;
while (!configFetched) {
HttpService httpService(httpurl);
vides_data::response *res = httpService.httpDeviceConfig(serialNumber, config);
if (res->code != 0) {
qInfo() << "请求远程商户配置失败,重试中...";
instace.deleteObj(res);
QThread::sleep(5); // 等待5秒后重试
} else {
instace.deleteObj(res);
configFetched = true;
}
}
realTimeUpdateDivConfig(httpurl,serialNumber);
initCommon();
......@@ -55,11 +39,9 @@ MainWindow::MainWindow():isResetting(false)
int deleteLogfileTimer=config.timerSettings.deleteLogFileTimer;
deleteLogFileTimer->start(deleteLogfileTimer);
initFaceFaceRecognition();
int uniformColor=config.uniformConfig.uniformColor;
QString uniformColor=config.uniformConfig.uniformColor;
int humanDetectionLen=config.uniformConfig.humanDetectionLen;
int licensePlateLen=config.licensePlateConfig.licensePlateLen;
......@@ -80,8 +62,7 @@ MainWindow::MainWindow():isResetting(false)
algorithmTaskManage.initialize(humanDetectionLen,licensePlateLen,faceLen,true,0x00);
algorithmTaskManage.initHumanDetectionManage(modelPaths,carShapeConfidence,uniformColor);
algorithmTaskManage.initLicensePlateManage(modelPaths,is_high,maxNum,
useHalf,boxThreshold,nmsThreshold,recThreshold
);
useHalf,boxThreshold,nmsThreshold,recThreshold);
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
QString configPath = qSetting->value("devices/sz_config_path").toString();
......@@ -92,6 +73,14 @@ MainWindow::MainWindow():isResetting(false)
qInfo() << "sdk初始化失败";
return;
}
if (!NET_DVR_Init()) {
qInfo() << "初始化海康 SDK 失败!";
return;
}
this->hk_Device =-1;
this->nvr_status =config.nvrConfig.isOn;
connect(this, SIGNAL(shutdownSignals(QString,int)), this, SLOT(clearHandle(QString,int)),Qt::QueuedConnection);
......@@ -102,7 +91,9 @@ MainWindow::MainWindow():isResetting(false)
},Qt::QueuedConnection);
this->startCamera(httpurl);
QString sign_Key = qSetting->value("cloudservice/signKey","sign_key").toString();
HttpService::setSingKey(sign_Key);
float confidence=config.faceConfig.confidence;
int faceNumbers=config.faceConfig.faceNumbers;
......@@ -113,9 +104,6 @@ MainWindow::MainWindow():isResetting(false)
// 启动定时器
dePermissionSynTimer->start();
//dePermissionSynTimer->start(dePermissionTimer);
//vides_data::scanWiFiNetworks();
connect(&server, &QTcpServer::newConnection, this, &MainWindow::handleMatNewConnection);
......@@ -133,12 +121,73 @@ MainWindow::MainWindow():isResetting(false)
this->mqttConfig= config.mqttConfig;
runOrRebootMqtt(mqttConfig,httpurl,serialNumber);
}
void MainWindow::realTimeUpdateDivConfig(QString &httpurl,QString &serialNumber){
Common & instace= Common::getInstance();
findLocalSerialNumber(serialNumber);
bool configFetched = false;
while (!configFetched) {
HttpService httpService(httpurl);
vides_data::response *res = httpService.httpDeviceConfig(serialNumber, config);
if (res->code != 0) {
qInfo() << "请求远程商户配置失败,重试中...";
instace.deleteObj(res);
QThread::sleep(5); // 等待5秒后重试
} else {
instace.deleteObj(res);
configFetched = true;
}
}
}
void MainWindow::runOrRebootMqtt(vides_data::MqttConfig &mqtt_config,QString &httpUrl,QString &serialNumber){
MqttSubscriber* subscriber = MqttSubscriber::getInstance(this);
subscriber->init(mqtt_config,httpUrl,serialNumber);
subscriber->start();
}
int MainWindow::loginNetDvr(QString ip,QString userName,QString passWord,uint16_t port){
if(hk_Device<0 ){
NET_DVR_USER_LOGIN_INFO struLoginInfo = {0};
NET_DVR_DEVICEINFO_V40 struDeviceInfoV40 = {0};
struLoginInfo.bUseAsynLogin = FALSE;
int lUserID = -1;
struLoginInfo.wPort=8000;
QByteArray bIp = ip.toUtf8();
char* cDevid=bIp.data();
memcpy(struLoginInfo.sDeviceAddress,cDevid,129);
QByteArray byteName = userName.toUtf8();
char* cName=byteName.data();
memcpy(struLoginInfo.sUserName,cName,64);
QByteArray bytePassword = passWord.toUtf8();
char* cPassword=bytePassword.data();
memcpy(struLoginInfo.sPassword,cPassword,64);
// 打印 NET_DVR_USER_LOGIN_INFO 的内容
lUserID = NET_DVR_Login_V40(&struLoginInfo, &struDeviceInfoV40);
if (lUserID < 0) {
qInfo() << QString("登录设备失败,错误编码:%1").arg(NET_DVR_GetLastError());
return lUserID;
}
this->hk_Device=lUserID;
return lUserID;
}
// Already logged in: return the existing handle so the non-void function never falls off the end.
return hk_Device;
}
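
loginNetDvr copies the IP, user name and password into the fixed-size char arrays of NET_DVR_USER_LOGIN_INFO with memcpy and the destination sizes (129/64/64), which can read past the end of the source QByteArray when the string is shorter than the destination. A bounded, always-NUL-terminated copy is sketched below; LoginFields is a stand-in struct with the same array sizes, not the real SDK type.

    #include <QByteArray>
    #include <QString>
    #include <cstring>
    #include <cstdio>

    // Stand-in for the fixed-size fields of NET_DVR_USER_LOGIN_INFO (same array lengths).
    struct LoginFields {
        char sDeviceAddress[129];
        char sUserName[64];
        char sPassword[64];
    };

    // Copy at most destSize-1 bytes and always NUL-terminate, so a short QString
    // never causes a read past the end of its UTF-8 buffer.
    static void copyField(char *dest, size_t destSize, const QString &value)
    {
        const QByteArray bytes = value.toUtf8();
        const size_t n = qMin<size_t>(static_cast<size_t>(bytes.size()), destSize - 1);
        std::memcpy(dest, bytes.constData(), n);
        dest[n] = '\0';
    }

    int main()
    {
        LoginFields login = {};
        copyField(login.sDeviceAddress, sizeof(login.sDeviceAddress), QStringLiteral("192.168.1.64"));
        copyField(login.sUserName,      sizeof(login.sUserName),      QStringLiteral("admin"));
        copyField(login.sPassword,      sizeof(login.sPassword),      QStringLiteral("example-password"));
        std::printf("address=%s user=%s\n", login.sDeviceAddress, login.sUserName);
        return 0;
    }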
bool MainWindow::getDeviceStatus(int UserId) {
if(UserId<0)return false;
bool devStatus =NET_DVR_RemoteControl(UserId, NET_DVR_CHECK_USER_STATUS, nullptr, 0);
if (devStatus) {
qInfo() << "NET_DVR_RemoteControl 设备在线";
} else {
qInfo() << "NET_DVR_RemoteControl 设备不在线";
}
return devStatus;
}
void MainWindow::divParameterUpdate(vides_data::responseConfig &cloudConfig,QString &httpUrl,QString &serialNumber ){
bool faceAlgorithm = false, licensePlateAlgorithm = false, uniformAlgorithm = false, timeChange = false;
AlgorithmTaskManage &algorithmTaskManage= AlgorithmTaskManage::getInstance();
......@@ -502,6 +551,7 @@ void MainWindow::clearOfflineCameraHandle(QString sDevId, int nDevPort) {
void MainWindow::setIsResetting(bool running) {
this->isResetting.store(running, std::memory_order_release);
}
//平台没有 盒子有 盒子关闭
void MainWindow::startCamera(const QString &httpurl){
if(this->isResetting.load(std::memory_order_acquire)){
......@@ -530,6 +580,8 @@ void MainWindow::startCamera(const QString &httpurl){
reStatus.mac=std::move(is_mac);
HttpService httpService(httpurl);
vides_data::response *re= httpService.httpFindCameras(serialNumber,devices);
if(re->code==0 || re->code==20004){
//本次搜索到设备列表
std::map<QString,vides_data::localDeviceStatus*> localDevices;
......@@ -553,6 +605,22 @@ void MainWindow::startCamera(const QString &httpurl){
instace.deleteObj(res_config);
return ;
}
config.nvrConfig = cloudConfig.nvrConfig;
if(config.nvrConfig.isOn && !getDeviceStatus(hk_Device) ){
int res= loginNetDvr(config.nvrConfig.ip,config.nvrConfig.username,config.nvrConfig.password,config.nvrConfig.port);
if( res<0 ){
nvr_status =false;
}else{
nvr_status =true;
}
}else if(!config.nvrConfig.isOn){
nvr_status = false;
}
else if(config.nvrConfig.isOn && getDeviceStatus(hk_Device)){
nvr_status = true;
}
instace.deleteObj(res_config);
divParameterUpdate(cloudConfig,nonConstHttpUrl,serialNumber);
......@@ -609,6 +677,8 @@ void MainWindow::startCamera(const QString &httpurl){
offlineCameraHandle->findFirmwareVersion(camera_info.firmware_version);
camera_info.mac=localDevice->mac;
reStatus.camera_info_list.push_front(camera_info);
offlineCameraHandle->setHkDevice(nvr_status,hk_Device);
qInfo() << QString("offlineCameraHandle->setHkDevice:%1,%2").arg(nvr_status).arg(hk_Device);
HttpService http_gb28181(httpurl);
vides_data::response *res=http_gb28181.httpFindGb28181Config(camera_info.sSn);
......@@ -656,8 +726,6 @@ void MainWindow::startCamera(const QString &httpurl){
}
instace.deleteObj(res);
updateLocalFace(httpurl);
instace.deleteObj(re);
......@@ -756,7 +824,6 @@ void MainWindow::initEncodeToString(QString &enCodeJson) {
// 添加 ExtraFormat 到 JSON 对象中
QJsonObject extraFormatObject;
QJsonObject videoObjectExtra = {
// {"BitRate", qSetting->value("ExtraFormat/Video.BitRate").toInt()},
{"BitRateControl", qSetting->value("ExtraFormat/Video.BitRateControl").toString()},
{"Compression", qSetting->value("ExtraFormat/Video.Compression").toString()},
{"FPS", qSetting->value("ExtraFormat/Video.FPS").toInt()},
......@@ -773,7 +840,6 @@ void MainWindow::initEncodeToString(QString &enCodeJson) {
// 添加 MainFormat 到 JSON 对象中
QJsonObject mainFormatObject;
QJsonObject videoObjectMain = {
// {"BitRate", qSetting->value("MainFormat/Video.BitRate").toInt()},
{"BitRateControl", qSetting->value("MainFormat/Video.BitRateControl").toString()},
{"Compression", qSetting->value("MainFormat/Video.Compression").toString()},
{"FPS", qSetting->value("MainFormat/Video.FPS").toInt()},
......@@ -1056,7 +1122,7 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,vides_data:
CameraHandle * cameraHandle =new CameraHandle(parameter.sDevId,parameter.httpUrl,parameter.sSn,parameter.channel,image_save,heightReference,devConfig);
int sdk_handle=cameraHandle->sdkDevLoginSyn(parameter.sDevId,parameter.nDevPort,parameter.sUserName,parameter.sPassword,3000);
qDebug() << QString("SN(%1): 句柄为%2").arg(parameter.sSn).arg(sdk_handle);
qInfo() << QString("SN(%1): 句柄为%2").arg(parameter.sSn).arg(sdk_handle);
if(sdk_handle<=0){
qInfo() << QString("SN(%1): 登录失败").arg(parameter.sSn);
......@@ -1064,12 +1130,10 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,vides_data:
}
mediaFaceImage->setMap(sdk_handle,cameraHandle);
initDevConfigSyn(cameraHandle,devConfig);
cameraHandle->sdkDevSetAlarmListener(sdk_handle,0);
qInfo() << QString("initCameras->setHkDevice:%1,%2").arg(nvr_status).arg(hk_Device);
cameraHandle->setHkDevice(nvr_status,hk_Device);
int synTime=devConfig.camera.devSnapSynTimer;
uint64 face_frequency=devConfig.humanConfig.faceFrequency;
float carConfidenceMax=devConfig.licensePlateConfig.carConfidenceMax;
......@@ -1078,9 +1142,6 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,vides_data:
cameraHandle->initSdkRealTimeDevSnapSyn(synTime,face_frequency);
cameraHandle->setCarConfidenceMaxAndMin(carConfidenceMax,carConfidenceMin);
// QString pwd="admin2024";
// QString sid="MERCURY_8C4F";
// cameraHandle->sdkWifi(pwd,sid);
vides_data::requestCameraInfo camera_info;
camera_info.sSn=parameter.sSn;
camera_info.ip_addr=parameter.sDevId;
......@@ -1106,13 +1167,6 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,vides_data:
return;
}
vides_data::responseGb28181 *gb281 = reinterpret_cast<vides_data::responseGb28181*>(res->data);
// QString stGb281;
// bool re= iniWorkSpVMn(gb281,stGb281,parameter.sSn);
// if(!re){
// QByteArray bGb =stGb281.toUtf8();
// const char* cGb=bGb.data();
// cameraHandle->sdkDevSpvMn(cGb);
// }
cameraHandle->updateSdkDevSpvMn(gb281);
instace.deleteObj(gb281);
instace.deleteObj(res);
......@@ -1170,8 +1224,10 @@ MainWindow::~MainWindow()
// 清空 handleMap
faceDetectionParkingPushs.clear();
LogHandler::Get().uninstallMessageHandler();
NET_DVR_Logout(hk_Device); // 注销登录
NET_DVR_Cleanup();
LogHandler::Get().uninstallMessageHandler();
}
void MainWindow::deleteLogFile(){
......
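
MainWindow now pairs NET_DVR_Init() in the constructor with NET_DVR_Logout()/NET_DVR_Cleanup() in the destructor, alongside the existing log-handler install/uninstall. When init/teardown pairs like this accumulate, a small RAII guard keeps them balanced even on early returns; the sketch below uses std::function stand-ins rather than the real SDK calls and is only an illustration of the pattern, in the same spirit as the project's ScopeSemaphoreExit.

    #include <functional>
    #include <cstdio>

    // Runs the teardown callable when the guard leaves scope, mirroring how
    // a cleanup call should always follow a successful init call.
    class SdkLifetimeGuard {
    public:
        SdkLifetimeGuard(const std::function<bool()> &init, std::function<void()> cleanup)
            : cleanup_(std::move(cleanup)), initialized_(init()) {}
        ~SdkLifetimeGuard() { if (initialized_) cleanup_(); }
        bool ok() const { return initialized_; }

        SdkLifetimeGuard(const SdkLifetimeGuard &) = delete;
        SdkLifetimeGuard &operator=(const SdkLifetimeGuard &) = delete;

    private:
        std::function<void()> cleanup_;
        bool initialized_;
    };

    int main()
    {
        SdkLifetimeGuard sdk([] { std::puts("init"); return true; },
                             [] { std::puts("cleanup"); });
        if (!sdk.ok())
            return 1;
        std::puts("... use the SDK ...");
        return 0;  // cleanup runs automatically here
    }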
......@@ -8,6 +8,7 @@
#include "MediaFaceImage.h"
#include "AlgorithmTaskManage.h"
#include "MqttSubscriber.h"
#include "HCNetSDK.h"
#include <algorithm>
#include <QString>
#include <QTextCodec>
......@@ -52,6 +53,10 @@ public:
CameraHandle* findHandle(QString sn);
void realTimeUpdateDivConfig(QString &httpurl,QString &serialNumber);
bool getDeviceStatus(int UserId);
void modifySnMapIp(QString &sn,QString &ip);
void findSnMapIp(QString &sn,QString &ip);
......@@ -86,6 +91,7 @@ public:
void setIsResetting(bool running);
int loginNetDvr(QString ip,QString userName,QString passWord,uint16_t port);
~MainWindow();
signals:
......@@ -127,5 +133,9 @@ private:
std::atomic<bool> isResetting;
int hk_Device;
bool nvr_status;
};
#endif // MAINWINDOW_H