Commit 25af28ce by 郭峰

Merge branch 'feature-1007488' into 'master'

Feature 1007488

See merge request !30
parents 22c70dd0 483aea37
......@@ -153,7 +153,7 @@ void* AlgorithmTaskManage::schedulingAlgorithm(int scheType) {
} else if (scheType == 0x03) {
return schedulingAlgorithmTemplate(faceReconitionHandles, mtxFace);
} else {
qInfo() << "参数错误";
qDebug() << "参数错误";
return nullptr;
}
}
......@@ -170,10 +170,10 @@ void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,s
if (selectedFaceReconition!=nullptr && !selectedFaceReconition->getImageChanged()) {
selectedFaceReconition->setIsRunning(true);
// invoke doesItExistEmployee on the selected handle
qInfo() << "人脸识别算法抢到===>sn"<<sSn<<selectedFaceReconition;
selectedFaceReconition->doesItExistEmployee(source, faces);
qDebug() << "人脸识别算法抢到===>sn"<<sSn<<selectedFaceReconition;
selectedFaceReconition->doesItExistEmployee(sSn,source, faces);
} else {
qInfo() << "没有可用的selectedFaceReconition对象可以调度";
qDebug() << "没有可用的selectedFaceReconition对象可以调度";
return ;
}
}
......@@ -192,10 +192,10 @@ void AlgorithmTaskManage::executeFindlicensePlateNumber(const cv::Mat &source, Q
if (selectedLicensePlate!=nullptr) {
selectedLicensePlate->setIsRunning(true);
// invoke licensePlateNumber on the selected handle
qInfo() << "车牌调度算法抢到===>sn"<<sSn<<selectedLicensePlate;
qDebug() << "车牌调度算法抢到===>sn"<<sSn<<selectedLicensePlate;
selectedLicensePlate->licensePlateNumber(source, lpNumber,plate, currentTime);
} else {
qInfo() << "没有可用的selectedLicensePlate对象可以调度";
qDebug() << "没有可用的selectedLicensePlate对象可以调度";
return ;
}
}
......@@ -214,11 +214,11 @@ int AlgorithmTaskManage::executeFindHuManCar(const cv::Mat &source, int res,
selectedHumanDetection->setIsRunning(true);
// invoke findHuManCar on the selected handle
qInfo() << "人形调度算法抢到===>sn"<<sSn<<"res"<<selectedHumanDetection;
int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, heightReference,currentPlate);
qDebug() << "人形调度算法抢到===>sn"<<sSn<<"res"<<selectedHumanDetection;
int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, sSn,heightReference,currentPlate);
return detectionResult;
} else {
qInfo() << "没有可用的HumanDetection对象可以调度";
qDebug() << "没有可用的HumanDetection对象可以调度";
return -2;
}
}
......@@ -14,6 +14,7 @@
#include "Json_Header/NetWork_Wifi.h"
#include "Json_Header/SystemInfo.h"
#include "Json_Header/OPMachine.h"
#include "NonConnectedCameraHandle.h"
#include "mainwindow.h"
#include "ParkingSpaceInfo.h"
#include "hyper_lpr_sdk.h"
......@@ -31,6 +32,7 @@
#include <opencv2/opencv.hpp>
#include <QSemaphore>
#include <atomic>
#include <arpa/inet.h>
enum CAR_INFORMATION {
Exit, // vehicle exiting
......@@ -61,7 +63,7 @@ public:
// update camera parameters
void cameraParameterUpdate(vides_data::responseConfig &cloudConfig);
void initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 face_frequency);
void initSdkRealTimeDevSnapSyn(int syn_timer,uint64 face_frequency);
void notificationUpdateImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence);
......@@ -85,10 +87,12 @@ public:
void printWifi(XSDK_HANDLE hDevice,XSDK_CFG::NetWork_Wifi &cfg);
bool isWifiConnect(XSDK_HANDLE hDevice,XSDK_CFG::NetWork_Wifi &cfg);
void setCarConfidenceMaxAndMin(float carConfidenceMax,float carConfidenceMin);
// configure the Wi-Fi network the camera connects to
void sdkWifi(QString &pwd,QString &ssid);
bool sdkWifi(QString &pwd,QString &ssid);
// time settings
void sdkDevSystemTimeZoneSyn(QString &time);
// recording settings
......@@ -107,7 +111,9 @@ public:
int deviceShutdown();
// get the firmware version
void findFirmwareVersion(QString &firmwareVersion);
// reset GB28181
int resetGb28181();
// get the IP
void findIp(QString &ip);
......@@ -127,6 +133,10 @@ public:
void setMediaHandle(int mediaHandle);
void initAlgorithmPermissions(__uint8_t algorithm);
void initFaceFrequency(uint64 face_frequency);
void initParkingSpaceInfo(const std::list<vides_data::responseArea>&areas);
bool compareLists(const std::list<vides_data::responseArea>& newAreas);
......@@ -158,7 +168,7 @@ signals:
void afterDownloadFile( int id,int recognitionType,QString ossUrl);
private slots:
void sdkRealTimeDevSnapSyn(int hDevice);
void sdkRealTimeDevSnapSyn();
void pushRecordToCloud(int id,int recognitionType,QString ossUrl);
//void releaseSemaphore();
......@@ -195,6 +205,9 @@ private :
int image_save;
std::atomic<uint64> faceCount;
std::atomic<bool> isOperateGb28181{false};
uint64 face_frequency;
__uint8_t algorithmPermissions;
......
......@@ -24,7 +24,7 @@ QString Common::generateSignature(const QString& accessKeySecret, const QString&
date + "\n" +
ossHeaders+ "\n" + // append the '\n' separator and make sure ossHeaders has no trailing whitespace
canonicalizedResource;
// convert the key and the message to byte arrays
QByteArray hmacKey = accessKeySecret.toUtf8();
QByteArray message = stringToSign.toUtf8();
......@@ -32,10 +32,10 @@ QString Common::generateSignature(const QString& accessKeySecret, const QString&
QMessageAuthenticationCode mac(QCryptographicHash::Sha1);
mac.setKey(hmacKey);
mac.addData(message);
QByteArray signature = mac.result().toBase64(); // use the QMessageAuthenticationCode result directly
return QString(signature);
}
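For context, the block above builds an HMAC-SHA1 string-to-sign and Base64-encodes the digest. A minimal sketch of how such a signature is usually attached to a request follows, assuming an Aliyun-OSS-style Authorization header; the header layout and every variable below (accessKeyId, signature, gmtDate, endpoint, objectKey) are assumptions, not taken from this merge request.
#include <QNetworkRequest>
#include <QUrl>
// Sketch only: send an already-computed signature with the request.
QNetworkRequest request(QUrl(endpoint + "/" + objectKey));
request.setRawHeader("Date", gmtDate.toUtf8());
request.setRawHeader("Authorization",
                     QString("OSS %1:%2").arg(accessKeyId, signature).toUtf8());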
QString Common::getVideoOut(){
......@@ -62,6 +62,37 @@ void Common::setImages(QString images){
images.append("/");
this->images=images;
}
QString Common::DecIpToHexIp(const QString& decIp) {
// split the IP address into its dotted parts
QStringList parts = decIp.split(".");
// validate the IP address (it must have 4 parts)
if (parts.size() != 4) {
return QString(); // return an empty string if invalid
}
QString hexIp; // holds the final hexadecimal representation
// walk the parts from last to first
for (int i = 3; i >= 0; --i) {
// convert each part to an integer
bool ok;
int part = parts[i].toInt(&ok);
if (!ok) return QString(); // return an empty string if the conversion fails
// convert the integer to hex, zero-padded to two digits
QString hexPart = QString::number(part, 16).rightJustified(2, '0');
// append the hex string to the result
hexIp.append(hexPart);
}
// prefix the result with "0x" to mark it as hexadecimal
hexIp.prepend("0x");
return hexIp; // return the final hexadecimal representation
}
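A short usage sketch for DecIpToHexIp (the sample address is an assumption): because the loop walks the octets from last to first, the result is the little-endian hex form of the address, i.e. the same numeric value inet_addr() yields on a little-endian host, which is presumably why <arpa/inet.h> is pulled in elsewhere in this merge request.
#include <arpa/inet.h>
// Sketch: "192.168.1.10" -> octets reversed -> "0a" "01" "a8" "c0"
QString hex = Common::getInstance().DecIpToHexIp("192.168.1.10"); // "0x0a01a8c0"
quint32 raw = inet_addr("192.168.1.10");                          // 0x0a01a8c0 on a little-endian host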
QString Common::GetLocalIp() {
QString ipAddress;
QList<QHostAddress> list = QNetworkInterface::allAddresses();
......
......@@ -10,7 +10,7 @@
#include <QCryptographicHash>
#include <QMessageAuthenticationCode>
#include <QNetworkInterface>
#include <map>
#include <QDebug>
class Common
{
......@@ -38,12 +38,15 @@ public:
QString getVideoDownload();
void setVideoDownload(QString videoDownload);
QString getImages();
void setImages(QString images);
QString GetLocalIp();
QString DecIpToHexIp(const QString& decIp);
template <typename T>
const T& clamp(const T& v, const T& lo, const T& hi)
{
......@@ -61,6 +64,7 @@ private:
QString videoOut;
QString videoDownload;
QString images;
Common();
~Common();
......
......@@ -192,7 +192,7 @@ cv::Mat FaceReconitionHandle::loadImageFromByteStream(const QString& filePath) {
void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
void FaceReconitionHandle::doesItExistEmployee(const QString &sn,const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
ScopeSemaphoreExit streamGuard([this]() {
isRunning.store(false, std::memory_order_release);
......@@ -211,14 +211,14 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
if (ret != HSUCCEED) {
qInfo()<<QString("image handle error:%1").arg((long) imageSteamHandle,0,10);
qInfo()<<QString("SN(%1): image handle error:%2").arg(sn).arg((long)imageSteamHandle,0,10);
return ;
}
HF_MultipleFaceData multipleFaceData = {0};
HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
if (multipleFaceData.detectedNum <= 0) {
qInfo()<<QString("search 未检测到人脸");
qInfo()<<QString("SN(%1): 未检测到人脸").arg(sn);
return ;
}
......@@ -227,11 +227,12 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
HInt32 featureNum;
HF_GetFeatureLength(ctxHandle, &featureNum);
for(int j=0;j< multipleFaceData.detectedNum; ++j){
qInfo()<<QString("doesItExistEmployee==>面部索引: %1").arg(j);
//qInfo()<<QString("doesItExistEmployee==>面部索引: %1").arg(j);
qDebug() << QString("SN(%1): 面部索引:%2").arg(sn).arg(j);
std::vector<float> newfeature(featureNum,0.0f);
ret = HF_FaceFeatureExtractCpy(ctxHandle, imageSteamHandle, multipleFaceData.tokens[j], newfeature.data());
if(ret != HSUCCEED) {
qInfo()<<QString("特征提取出错: %1").arg(ret);
qInfo() << QString("SN(%1): 特征提取出错:%2").arg(sn).arg(ret);
HF_ReleaseImageStream(imageSteamHandle);
return ;
}
......@@ -248,13 +249,12 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
HFloat confidence;
ret = HF_FeaturesGroupFeatureSearch(ctxHandle, feature, &confidence, &searchIdentity);
if (ret != HSUCCEED) {
qInfo()<<QString("搜索失败: %1").arg(ret);
qInfo() << QString("SN(%1): 搜索失败:%2").arg(sn).arg(ret);
return ;
}
qInfo()<<QString("搜索置信度: %1").arg(confidence);
qInfo()<<QString("匹配到的tag: %1").arg(searchIdentity.tag);
qInfo()<<QString("匹配到的customId: %1").arg(searchIdentity.customId);
qInfo() << QString("SN(%1): 搜索置信度:%2").arg(sn).arg(confidence);
qInfo() << QString("SN(%1): 匹配到的tag:%2").arg(sn).arg(searchIdentity.tag);
qInfo() << QString("SN(%1): 匹配到的customId:%2").arg(sn).arg(searchIdentity.customId);
// Face Pipeline
//printf("人脸特征数量: %d", faceNum);
if (confidence > configConfidence) {
......@@ -275,7 +275,7 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
// printf("image released");
} else {
//printf("image release error: %ld", ret);
qInfo()<<QString("image release error: %1").arg(ret);
qInfo() << QString("SN(%1): image release error:%2").arg(sn).arg(ret);
}
}
......@@ -33,7 +33,7 @@ public:
void setImageChanged(bool imageChanged);
void doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face);
void doesItExistEmployee(const QString &sn,const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face);
void initSourceImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence);
......
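A minimal call-site sketch for the widened doesItExistEmployee signature above (handle, frame, and SN names are placeholders; in this merge request the actual caller is AlgorithmTaskManage::executeFindDoesItExistEmployee):
// Sketch: the camera SN now travels with the frame so log lines can be attributed to a device.
QString sn = QStringLiteral("SN-0001");                // placeholder serial number
std::list<vides_data::faceRecognitionResult> faces;    // filled by the call
faceReconitionHandle->doesItExistEmployee(sn, frame, faces);  // frame: cv::Mat (placeholder)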
......@@ -161,7 +161,7 @@ vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_d
}else{
qInfo()<<m_httpClient.errorCode();
resp->code=2;
resp->msg=OPERATION_FAILED;
resp->msg=m_httpClient.errorCode();
}
return resp;
}
......@@ -354,11 +354,6 @@ vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &h
resp->code=map["code"].toInt();
resp->msg=map["message"].toString();
}else{
qInfo()<<"httpPostFacePopulation===>";
qInfo()<<m_httpClient.errorCode();
qInfo()<<m_httpClient.errorString();
qInfo()<<"httpPostFacePopulation===>end";
resp->code=2;
resp->msg=m_httpClient.errorCode();
}
......@@ -511,7 +506,6 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
QJsonObject faceConfigObj = dataObj["faceConfig"].toObject();
config.faceConfig.isOn = faceConfigObj["isOn"].toBool();
config.faceConfig.faceNumbers = faceConfigObj["faceNumbers"].toInt();
config.faceConfig.faceFrequency = faceConfigObj["faceFrequency"].toInt();
config.faceConfig.confidence = faceConfigObj["confidence"].toVariant().toFloat();
config.faceConfig.updateAt = faceConfigObj["updateAt"].toVariant().toULongLong();
config.faceConfig.faceLen=faceConfigObj["faceLen"].toInt();
......@@ -542,6 +536,7 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
QJsonObject humanConfigObj = dataObj["humanConfig"].toObject();
config.humanConfig.isOn=humanConfigObj["isOn"].toBool();
config.humanConfig.updateAt = humanConfigObj["updateAt"].toVariant().toULongLong();
config.humanConfig.faceFrequency = humanConfigObj["faceFrequency"].toVariant().toUInt();
// parse devicesConfig
QJsonObject devicesConfigObj = dataObj["camera"].toObject();
......
......@@ -7,7 +7,7 @@ HttpClient::HttpClient(QObject *parent)
{
m_networkAccessManager = new QNetworkAccessManager(this);
m_timer = new QTimer(this);
m_timer->setInterval(6000);
m_timer->setInterval(8000);
m_timer->setSingleShot(true);
connect(m_timer, SIGNAL(timeout()), &m_eventLoop, SLOT(quit()));
}
......
......@@ -77,7 +77,7 @@ void HumanDetection::setHuManParameter(int &uniformColor){
}
// 0: person, 1: vehicle, 2: work uniform
int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int>&reMap, float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate) {
int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int>&reMap,QString &sSn, float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate) {
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
TCV_CameraStream *stream = TCV_CreateCameraStream();
......@@ -100,8 +100,7 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
if (res == 0x00 || res == 0x02) {
num = TCV_HumanDetectorGetNumOfHuman(detector);
qInfo()<<"TCV_HumanDetectorGetNumOfHuman==>"<<num;
qInfo() << QString("SN(%1): 获取人形数量:%2").arg(sSn).arg(num);
if (num == 0) return num; // 无行人检测结果,提前返回
std::vector<TCV_ObjectLocation> results(num);
TCV_HumanDetectorGetHumanLocation(detector, results.data(), num);
......@@ -133,8 +132,6 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
}
reMap[0x02] = count_no_uniform; // number of pedestrians not wearing the work uniform
reMap[0x00] = count_all; // number of all pedestrians meeting the criteria
qInfo()<<"count_all==>"<<count_all;
qInfo()<<"count_no_uniform==>"<<count_no_uniform;
num = (res == 0x00) ? count_all : count_no_uniform;
}
......
......@@ -16,7 +16,7 @@ public:
float carShapeConfidence);
~HumanDetection();
int findHuManCar(const cv::Mat &source,int res,std::map<int,int>&reMap,
int findHuManCar(const cv::Mat &source,int res,std::map<int,int>&reMap,QString &sSn,
float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate);
void setHuManParameter(int &uniformColor);
......
......@@ -82,7 +82,6 @@ void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const
} else {
type = types[results.plates[i].type];
}
qInfo()<<QString("车牌号:%1").arg(results.plates[i].code);
QString plateResult = QString("第%1个,%2,车牌号:%3,置信度:%4,左上角点x坐标:%5,左上角点y坐标:%6,右下角点x坐标:%7,右下角点y坐标:%8")
.arg(i + 1).arg(type.c_str()).arg(results.plates[i].code)
.arg(results.plates[i].text_confidence).arg(results.plates[i].x1, 0, 'Q', 4)
......@@ -168,7 +167,6 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
newPlate.new_color=QString::fromStdString(type);
QString car_nuber=QString::fromUtf8(results.plates[i].code);
replaceWith1And0(car_nuber);
qInfo()<<"I O (i o)大小写替换为 1 0结果:==>"<<car_nuber;
newPlate.new_plate=car_nuber;
newPlate.text_confidence=results.plates[i].text_confidence;
vides_data::ParkingArea area;
......
......@@ -12,7 +12,7 @@
#include <QTextStream>
#include <QTextCodec>
const int g_logLimitSize = 5;
const int g_logLimitSize = 45;
struct LogHandlerPrivate {
LogHandlerPrivate();
......
......@@ -25,13 +25,13 @@ LogHandlerPrivate::LogHandlerPrivate() {
openAndBackupLogFile();
// check the log file creation time every ten minutes
renameLogFileTimer.setInterval(1000*2); // TODO: could be read from a config file
renameLogFileTimer.setInterval(1000 * 60 * 10); // TODO: could be read from a config file
renameLogFileTimer.start();
QObject::connect(&renameLogFileTimer, &QTimer::timeout, [this] {
QMutexLocker locker(&LogHandlerPrivate::logMutex);
openAndBackupLogFile(); // open the log file
checkLogFiles(); // check the current log file size
// autoDeleteLog(); // automatically delete logs older than 30 days
// autoDeleteLog(); // automatically delete logs older than 30 days
});
// flush log output to the file periodically so the latest entries show up in the log file as soon as possible
......@@ -91,10 +91,17 @@ void LogHandlerPrivate::openAndBackupLogFile() {
logFile->close();
delete logOut;
delete logFile;
QDate renameDate = logFileCreatedDate;
if (logFileCreatedDate == QDate::currentDate()) {
renameDate = QDate::currentDate().addDays(-1); // use yesterday's date
}
QString newLogPath = logDir.absoluteFilePath(logFileCreatedDate.toString("yyyy-MM-dd.log"));
QFile::rename(logPath, newLogPath); // rename the original log file to its archive name
QString newLogPath = logDir.absoluteFilePath(renameDate.toString("yyyy-MM-dd.log"));
// QFile::rename(logPath, newLogPath); // rename the original log file to its archive name
QFile::copy(logPath, newLogPath);
QFile::remove(logPath);
logFile = new QFile(logPath);
logOut = (logFile->open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Append)) ? new QTextStream(logFile) : nullptr;
logFileCreatedDate = QDate::currentDate();
......@@ -105,8 +112,8 @@ void LogHandlerPrivate::openAndBackupLogFile() {
// 检测当前日志文件大小
void LogHandlerPrivate::checkLogFiles() {
// if protocal.log exceeds 5 MB, create a new log file and archive the old one as yyyy-MM-dd_hhmmss.log
if (logFile->size() > 1024*g_logLimitSize) {
// if protocal.log exceeds 45 MB, create a new log file and archive the old one as yyyy-MM-dd_hhmmss.log
if (logFile->size() > 1024*1024*g_logLimitSize) {
logFile->flush();
logFile->close();
delete logOut;
......@@ -114,7 +121,11 @@ void LogHandlerPrivate::checkLogFiles() {
QString logPath = logDir.absoluteFilePath("today.log"); // path of the log file
QString newLogPath = logDir.absoluteFilePath(logFileCreatedDate.toString("yyyy-MM-dd.log"));
QFile::rename(logPath, newLogPath);
// QFile::rename(logPath, newLogPath);
QFile::copy(logPath, newLogPath);
QFile::remove(logPath);
logFile = new QFile(logPath);
logOut = (logFile->open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Append)) ? new QTextStream(logFile) : NULL;
logFileCreatedDate = QDate::currentDate();
......
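A quick arithmetic check on the rotation threshold above, as a sketch (not part of the diff): the old code (g_logLimitSize = 5, compared against 1024*g_logLimitSize) rotated at roughly 5 KB despite the 5 MB comment, while the new code (g_logLimitSize = 45, compared against 1024*1024*g_logLimitSize) rotates at 45 MB.
// Sketch: byte values implied by the two expressions.
static_assert(1024 * 5 == 5120, "old threshold was about 5 KB");
static_assert(1024 * 1024 * 45 == 47185920, "new threshold is 45 MB");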
......@@ -62,7 +62,7 @@ static int sdkInitCallback(XSDK_HANDLE hObject, int nMsgId, int nParam1,
auto taskCallBack=std::bind(&CameraHandle::callbackFunction, cameraHandle, hObject, qString);
auto taskRunnable = new TaskRunnable(taskCallBack, hObject,cameraHandle->getChannel(), RunFunction::SdkCallbackFunction);
threadPool->start(taskRunnable);
}
}
break;
......@@ -92,8 +92,8 @@ int MediaFaceImage::SdkSearchDevicesSyn(std::map<QString, vides_data::localDevic
for (int i = 0; i < nActualCount; i++)
{
qInfo() << QString("[%1][IP:%2.%3.%4.%5][SN:%6][Mac:%7]")
.arg(i)
.arg(pRet[i].HostIP.c[0])
.arg(i)
.arg(pRet[i].HostIP.c[0])
.arg(pRet[i].HostIP.c[1])
.arg(pRet[i].HostIP.c[2])
.arg(pRet[i].HostIP.c[3])
......@@ -230,55 +230,54 @@ int MediaFaceImage::ToFile(const char* pFileName, const void* pData, int nLength
// return pInOutBufferSize; // pOutBuffer由智能指针管理,此处无需手动释放
//}
int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat &image) {
int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat &image,QString &sSn) {
const int BufferSize = 1024 * 1024 * 2; // buffer size
image.release(); // release the previous image
std::unique_ptr<unsigned char[]> pOutBuffer(new unsigned char[BufferSize]); // memory managed by a smart pointer
int pInOutBufferSize = 0;
int ret = XSDK_DevSnapSyn(hMedia, nChannel, "", pOutBuffer.get(), &pInOutBufferSize);
if (ret < 0 || pInOutBufferSize <= 0) {
qInfo() << "同步设备端抓图失败";
qInfo() << QString("SN(%1): 同步设备端抓图失败").arg(sSn);;
return -1;
}
// manage the buffer data with std::vector
std::vector<uchar> buffer(pInOutBufferSize);
memcpy(buffer.data(), pOutBuffer.get(), pInOutBufferSize);
try {
cv::Mat decodedImage = cv::imdecode(buffer, cv::IMREAD_UNCHANGED);
if (decodedImage.empty()) {
qInfo() << "图像解码失败";
qInfo() << QString("SN(%1): 图像解码失败").arg(sSn);;
return -1;
}
image = std::move(decodedImage);
} catch (const cv::Exception& e) {
qInfo() << "图像解码过程中捕获异常:" << e.what();
qInfo() << QString("SN(%1): 图像解码过程中捕获异常:%2").arg(sSn).arg(e.what());
return -1;
}
return pInOutBufferSize;
}
int MediaFaceImage::CameraImage(XSDK_HANDLE hMedia,int nChannel,std::vector<uchar> &buffer){
static const int BufferSize = 1024 * 1024 * 2; // 2MB buffer size
static unsigned char pOutBuffer[BufferSize];
// initialized to 0; receives the actual filled size
int pInOutBufferSize = 0;
// try to fetch snapshot data from the device
int ret = XSDK_DevSnapSyn(hMedia, nChannel, "", pOutBuffer, &pInOutBufferSize);
if (ret < 0 || pInOutBufferSize <= 0) {
qInfo() << "同步设备端抓图失败";
return -1; // -1 indicates failure
}
// initialize the vector from pOutBuffer, copying the data into it
buffer = std::vector<uchar>(pOutBuffer, pOutBuffer + pInOutBufferSize);
// return the actual amount of data copied into the vector
return pInOutBufferSize;
}
......@@ -18,7 +18,7 @@ public:
static MediaFaceImage* getInstance(); // static accessor for the singleton instance
void ParserImageData(const unsigned char* pData, int nDataLen, char* pJpg, int* nJpgLen, char* pJson);
int FaceImageCallBack(XSDK_HANDLE hMedia,int nChannel,cv::Mat &image);
int FaceImageCallBack(XSDK_HANDLE hMedia,int nChannel,cv::Mat &image,QString &sSn);
int CameraImage(XSDK_HANDLE hMedia,int nChannel,std::vector<uchar> &buffer);
int ToFile(const char* pFileName, const void* pData, int nLenght);
......@@ -34,6 +34,7 @@ public:
void clearCurrentDevice(int hObject);
void setMap(int &key,CameraHandle*value);
private:
MediaFaceImage(); // constructor is private
~MediaFaceImage(); // destructor is private
......
......@@ -24,29 +24,29 @@ void MqttSubscriber::init(vides_data::MqttConfig &config, QString &httpUrl, QStr
qInfo() << "客户端断开连接失败,返回编码" << rc;
}
}
// destroy the existing MQTT client
MQTTAsync_destroy(&client);
client = nullptr; // reset the client pointer
}
// save the configuration
this->config = config;
this->httpUrl = httpUrl;
this->serialNumber = serialNumber;
// initialize a new MQTT client
QByteArray bAddress = config.address.toUtf8();
char* cAddress = bAddress.data();
QByteArray bClientId = config.clientId.toUtf8();
char* cClientId = bClientId.data();
int rc = MQTTAsync_create(&client, cAddress, cClientId, MQTTCLIENT_PERSISTENCE_NONE, nullptr);
if (rc != MQTTASYNC_SUCCESS) {
qInfo() << "MQTT客户端创建失败,返回编码" << rc;
return;
}
// set the callbacks
MQTTAsync_setCallbacks(client, this, [](void* context, char* cause) {
static_cast<MqttSubscriber*>(context)->connectionLost(cause);
......@@ -57,7 +57,7 @@ void MqttSubscriber::init(vides_data::MqttConfig &config, QString &httpUrl, QStr
MqttSubscriber::MqttSubscriber(QObject* parent)
: QObject(parent), retryTimer(new QTimer(this)), client(nullptr) {
// connect signals and slots
connect(this, &MqttSubscriber::connectionLostSignal, this, &MqttSubscriber::reconnectAndFetchConfig, Qt::QueuedConnection);
retryTimer->setInterval(10000); // set the retry interval to 10 seconds
......@@ -76,18 +76,18 @@ MqttSubscriber::~MqttSubscriber() {
void MqttSubscriber::start() {
// make sure the timer is stopped
retryTimer->stop();
// set the connection options
MQTTAsync_connectOptions conn_opts = MQTTAsync_connectOptions_initializer;
conn_opts.keepAliveInterval = 20;
conn_opts.cleansession = 1;
QByteArray bUsername = config.username.toUtf8();
char* cUsername = bUsername.data();
QByteArray bPassword = config.password.toUtf8();
char* cPassword = bPassword.data();
conn_opts.username = cUsername;
conn_opts.password = cPassword;
conn_opts.onSuccess = [](void* context, MQTTAsync_successData* response) {
......@@ -97,7 +97,7 @@ void MqttSubscriber::start() {
static_cast<MqttSubscriber*>(context)->onConnectFailure(response);
};
conn_opts.context = this;
// start the connection
int rc;
if ((rc = MQTTAsync_connect(client, &conn_opts)) != MQTTASYNC_SUCCESS) {
......@@ -116,7 +116,7 @@ void MqttSubscriber::onConnect(MQTTAsync_successData* response) {
static_cast<MqttSubscriber*>(context)->onSubscribeFailure(response);
};
opts.context = this;
QByteArray bTopic = config.topic.toUtf8();
char* cTopic = bTopic.data();
int rc;
......@@ -129,7 +129,7 @@ void MqttSubscriber::reconnectAndFetchConfig() {
qInfo() << "重新连接并获取配置";
Common& instace = Common::getInstance();
vides_data::responseConfig re_config;
// pull the configuration from the remote cloud via HttpService
HttpService httpService(httpUrl); // replace with the actual remote URL
vides_data::response* res = httpService.httpDeviceConfig(serialNumber, re_config);
......@@ -174,7 +174,7 @@ void MqttSubscriber::connectionLost(char* cause) {
int MqttSubscriber::messageArrived(char* topicName, int topicLen, MQTTAsync_message* m) {
QString topic(topicName);
QString payload = QString::fromUtf8(reinterpret_cast<const char*>(m->payload), m->payloadlen);
QJsonObject msgBodyOb;
vides_data::responseMqttData response;
QJsonDocument jsonDoc = QJsonDocument::fromJson(payload.toUtf8());
if (!jsonDoc.isNull() && jsonDoc.isObject()) {
......@@ -182,56 +182,101 @@ int MqttSubscriber::messageArrived(char* topicName, int topicLen, MQTTAsync_mess
response.msg_type = jsonObj["msg_type"].toInt();
response.sn = jsonObj["sn"].toString();
response.uniq = jsonObj["uniq"].toString();
if(response.msg_type==6 || response.msg_type==7){
// parse the msg_body field
if (jsonObj.contains("msg_body")) {
QString msgBodyStr = jsonObj["msg_body"].toString();
// convert the msg_body string into a QJsonDocument
QJsonDocument msgBodyDoc = QJsonDocument::fromJson(msgBodyStr.toUtf8());
if (!msgBodyDoc.isNull() && msgBodyDoc.isObject()) {
msgBodyOb = msgBodyDoc.object();
} else {
qInfo() << "Failed to parse msg_body as a JSON object.";
}
}
}
} else {
qInfo() << "Failed to parse JSON payload";
}
// 1: power on, 2: power off, 3: reboot, 4: enable GB28181, 5: disable GB28181, 6: one-click network provisioning, 7: Wi-Fi provisioning, 8: reset GB28181
int res = -2;
CameraHandle* cameraHandle = MainWindow::sp_this->findHandle(response.sn);
if (cameraHandle == nullptr) {
qInfo() << "不存在该相机";
res = -1;
int hDevice=0;
if (cameraHandle == nullptr ) {
if(response.msg_type == 6){
hDevice=-1;
}else {
qInfo() << "不存在该相机";
res = -1;
}
} else {
if (response.msg_type == 2) {
res = cameraHandle->deviceShutdown();
} else if (response.msg_type == 3) {
res = cameraHandle->deviceReboot();
} else if (response.msg_type == 4) {
res = cameraHandle->updateSdkDevStatus(true);
} else if (response.msg_type == 5) {
res = cameraHandle->updateSdkDevStatus(false);
switch (response.msg_type) {
case 2: res = cameraHandle->deviceShutdown(); break;
case 3: res = cameraHandle->deviceReboot(); break;
case 4: res = cameraHandle->updateSdkDevStatus(true); break;
case 5: res = cameraHandle->updateSdkDevStatus(false); break;
case 6: hDevice = cameraHandle->getHdevice(); break;
case 7: {
if (msgBodyOb.contains("username") && msgBodyOb.contains("password")) {
QString username = msgBodyOb["username"].toString();
QString password = msgBodyOb["password"].toString();
res = (cameraHandle->sdkWifi(password, username) ? 0 : 0x01);
} else {
qInfo() << "IP username not found in msg_body.";
qInfo() << "IP password not found in msg_body.";
}
break;
}
case 8: res = (cameraHandle->resetGb28181() ? 0 : 0x01); break;
default: qInfo() << "Unknown message type"; break;
}
}
if (response.msg_type == 6) {
NonConnectedCameraHandle *connectedCameraHandle =NonConnectedCameraHandle::getInstance();
QString ipAddress ;
// extract the IP address
if (msgBodyOb.contains("ip") && msgBodyOb["ip"].isString()) {
ipAddress = msgBodyOb["ip"].toString();
qInfo() << "IP Address: " << ipAddress;
} else {
qInfo() << "IP address not found in msg_body.";
}
res=connectedCameraHandle->distributionNetwork(ipAddress,response.sn,hDevice);
}
vides_data::requestMqttData request;
request.code = (res >= 0) ? 0 : 0x01;
request.uniq = response.uniq;
request.sn = response.sn;
sendSubscriptionConfirmation(request);
request.msg = (res >= 0) ?"成功":"失败";
request.uniq= response.uniq;
sendSubscriptionConfirmation(request,response.sn);
MQTTAsync_freeMessage(&m);
MQTTAsync_free(topicName);
return 1;
}
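For reference, a hedged example of the payload shape this handler expects (all values are invented; only the field names and msg_type meanings come from the code above). For msg_type 7 the Wi-Fi credentials arrive as a JSON string inside msg_body, and the reply is published to /thingshub/<uniq>/device/post with code, uniq, and msg fields:
// Sketch: an assumed incoming message for msg_type 7 (Wi-Fi provisioning).
const char *examplePayload = R"({
    "msg_type": 7,
    "sn": "SN-0001",
    "uniq": "req-123",
    "msg_body": "{\"username\":\"office-ap\",\"password\":\"secret\"}"
})";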
void MqttSubscriber::sendSubscriptionConfirmation(const vides_data::requestMqttData& response) {
QString responseTopic = "/thingshub/" + response.sn + "/device/post";
void MqttSubscriber::sendSubscriptionConfirmation(const vides_data::requestMqttData& response,QString &sn) {
QString responseTopic = "/thingshub/" +response.uniq+ "/device/post";
QByteArray bResponseTopic = responseTopic.toUtf8();
char* cResponseTopic = bResponseTopic.data();
qInfo() << "sendSubscriptionConfirmation" << cResponseTopic;
qInfo()<<QString("SN(%1): sendSubscriptionConfirmation->cResponseTopic%2").arg(sn).arg(cResponseTopic);
// response.sn
QJsonObject json;
json["code"] = response.code;
json["uniq"] = response.uniq;
json["msg"] = response.msg;
QJsonDocument jsonDoc(json);
QByteArray payload = jsonDoc.toJson(QJsonDocument::Compact);
MQTTAsync_message pubmsg = MQTTAsync_message_initializer;
pubmsg.payload = const_cast<char*>(payload.data());
pubmsg.payloadlen = payload.size();
pubmsg.qos = config.qos;
pubmsg.retained = 0;
MQTTAsync_responseOptions opts = MQTTAsync_responseOptions_initializer;
opts.onSuccess = [](void* context, MQTTAsync_successData* response) {
static_cast<MqttSubscriber*>(context)->onPublishSuccess(response);
......@@ -240,7 +285,7 @@ void MqttSubscriber::sendSubscriptionConfirmation(const vides_data::requestMqttD
static_cast<MqttSubscriber*>(context)->onPublishFailure(response);
};
opts.context = this;
int rc;
if ((rc = MQTTAsync_sendMessage(client, cResponseTopic, &pubmsg, &opts)) != MQTTASYNC_SUCCESS) {
qInfo() << "发送消息失败,返回编码" << rc;
......
......@@ -45,7 +45,7 @@ private:
void onPublishSuccess(MQTTAsync_successData* response);
void onPublishFailure(MQTTAsync_failureData* response);
void sendSubscriptionConfirmation(const vides_data::requestMqttData& response);
void sendSubscriptionConfirmation(const vides_data::requestMqttData& response,QString &sn);
static MqttSubscriber* instance;
};
......
#ifndef NONCONNECTEDCAMERAHANDLE_H
#define NONCONNECTEDCAMERAHANDLE_H
#include "XSDKPublic.h"
#include "XNetSDKSyn.h"
#include "XNetSDKDefine.h"
#include "VidesData.h"
#include "Common.h"
#include "ScopeSemaphoreExit.h"
#include "Json_Header/NetWork_NetCommon.h"
#include "Json_Header/NetWork_Wifi.h"
#include <arpa/inet.h>
class NonConnectedCameraHandle
{
public:
static NonConnectedCameraHandle* getInstance(); // static accessor for the singleton instance
// change the camera IP over a wired connection
bool changeCameraIp(vides_data::localDevice &device);
// change the camera IP over Wi-Fi
bool wifiChangeIp(QString &Ip, bool is_connect, int h_device,QString &sn);
bool isWifiConnect(XSDK_HANDLE hDevice,XSDK_CFG::NetWork_Wifi &cfg);
void printWifi(XSDK_HANDLE hDevice,XSDK_CFG::NetWork_Wifi &cfg);
bool distributionNetwork(QString &ip,QString &sSn,int hDevice);
int sdkDevLoginSyn(QString sDevId, int nDevPort,
QString sUserName, QString sPassword, int nTimeout) ;
private:
NonConnectedCameraHandle(); // constructor is private
~NonConnectedCameraHandle(); // destructor is private
static NonConnectedCameraHandle* m_instance; // pointer to the instance
};
#endif // NONCONNECTEDCAMERAHANDLE_H
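A minimal usage sketch for the new singleton (IP, SN, and handle values are placeholders; this mirrors the msg_type 6 path in MqttSubscriber::messageArrived, which passes -1 as the device handle when the camera is not yet connected):
// Sketch: one-click provisioning of a camera that the box cannot reach yet.
NonConnectedCameraHandle *provisioner = NonConnectedCameraHandle::getInstance();
QString ip = QStringLiteral("192.168.10.50");  // placeholder target IP from msg_body
QString sn = QStringLiteral("SN-0001");        // placeholder camera serial number
bool ok = provisioner->distributionNetwork(ip, sn, /*hDevice=*/-1);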
......@@ -10,7 +10,6 @@ ParkingSpaceInfo::ParkingSpaceInfo(){
}
ParkingSpaceInfo::~ParkingSpaceInfo(){
qInfo() << "ParkingSpaceInfo:关闭";
}
void ParkingSpaceInfo::addQueue(RecognizedInfo &info){
QMutexLocker locker(&queueMutex);
......
......@@ -48,8 +48,6 @@ struct requestDeviceStatus
std::list<requestCameraInfo>camera_info_list;
requestDeviceStatus() {}
};
struct responseStsCredentials{
QString access_key_id;
QString access_key_secret;
......@@ -109,6 +107,19 @@ struct localDeviceStatus
QString password;
localDeviceStatus() {}
};
struct localDevice
{
QString sSn;
QString reachableIp;
bool isMask;
int nDevPort;
int TCPPort;
int ChannelNum;
QString UserName;
QString password;
localDevice() {}
};
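A hedged initialization sketch for the new localDevice struct (every value below is a placeholder); an instance like this is what NonConnectedCameraHandle::changeCameraIp takes for wired re-addressing:
vides_data::localDevice device;
device.sSn         = QStringLiteral("SN-0001");       // placeholder serial number
device.reachableIp = QStringLiteral("192.168.10.50"); // placeholder free IP, e.g. from findReachableIp()
device.isMask      = true;
device.nDevPort    = 34567;                           // placeholder SDK port
device.TCPPort     = 34567;
device.ChannelNum  = 1;
device.UserName    = QStringLiteral("admin");         // placeholder credentials
device.password    = QStringLiteral("");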
struct requestFaceReconition
{
QString id;
......@@ -250,7 +261,6 @@ struct Camera {
struct FaceConfig {
bool isOn;
int faceNumbers;
uint64 faceFrequency;
float confidence;
int faceLen;
quint64 updateAt;
......@@ -261,7 +271,7 @@ struct LicensePlateConfig {
float carConfidenceMax;
float carConfidenceMin;
int licensePlateLen;
quint64 updateAt;
quint64 updateAt;
int maxNum; ///< maximum number of recognitions
bool useHalf; ///< whether to use half-precision inference
float boxConfThreshold; ///< detection box confidence threshold
......@@ -281,7 +291,7 @@ struct UniformConfig {
};
struct HumanConfig{
bool isOn;
int humanDetectionLen;
uint64 faceFrequency;
quint64 updateAt;
};
......@@ -313,11 +323,12 @@ struct responseMqttData{
uint8_t msg_type;
QString sn;
QString uniq;
QString msg_body;
};
struct requestMqttData{
QString sn;
int code;
QString msg;
int code;
QString uniq;
};
struct DetectionParams {
......@@ -400,16 +411,66 @@ inline QString getDefaultGateway() {
return gateway;
}
inline void convertQStringToSXSDK_IPAddress(const QString& ipString, SXSDK_IPAddress& hostIP) {
QHostAddress address(ipString);
quint32 ip = address.toIPv4Address();
hostIP.c[0] = (ip >> 24) & 0xFF;
hostIP.c[1] = (ip >> 16) & 0xFF;
hostIP.c[2] = (ip >> 8) & 0xFF;
hostIP.c[3] = ip & 0xFF;
}
inline bool isInSameSubnet(const QString &ip1, const QString &ip2, const QString &mask)
{
QHostAddress address1(ip1);
QHostAddress address2(ip2);
QHostAddress subnetMask(mask);
// convert the IP addresses and subnet mask from QHostAddress to quint32
quint32 addr1 = address1.toIPv4Address();
quint32 addr2 = address2.toIPv4Address();
quint32 maskAddr = subnetMask.toIPv4Address();
// apply the AND mask
quint32 result1 = addr1 & maskAddr;
quint32 result2 = addr2 & maskAddr;
// compare the results
return result1 == result2;
}
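A worked example for the subnet check above (addresses are illustrative): 192.168.1.10 & 255.255.255.0 and 192.168.1.200 & 255.255.255.0 both reduce to 192.168.1.0, so they compare equal; 192.168.2.1 reduces to 192.168.2.0 and does not.
// Sketch: expected results for sample addresses.
bool same  = isInSameSubnet("192.168.1.10", "192.168.1.200", "255.255.255.0"); // true
bool other = isInSameSubnet("192.168.1.10", "192.168.2.1",   "255.255.255.0"); // false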
// get the local MAC address, subnet mask, and gateway IP
inline bool GetNetworkInfoByQNetworkInterface(QString &mac, QString &subnetMask, QString &gateway) {
QList<QNetworkInterface> interfaces = QNetworkInterface::allInterfaces();
foreach (QNetworkInterface interface, interfaces) {
if (interface.flags().testFlag(QNetworkInterface::IsUp) &&
interface.flags().testFlag(QNetworkInterface::IsRunning) &&
!interface.flags().testFlag(QNetworkInterface::IsLoopBack)) {
mac = interface.hardwareAddress();
QList<QNetworkAddressEntry> addressEntries = interface.addressEntries();
foreach (QNetworkAddressEntry entry, addressEntries) {
if (entry.ip().protocol() == QAbstractSocket::IPv4Protocol) {
subnetMask = entry.netmask().toString();
gateway = entry.broadcast().toString(); // assumes here that the gateway is the broadcast address
return true;
}
}
}
}
return false; // Return false if no suitable interface is found
}
inline bool pingAddress(const QString &address) {
QProcess process;
QString program = "ping";
QStringList arguments;
#ifdef Q_OS_WIN
#ifdef Q_OS_WIN
arguments << "-n" << "1" << address;
#else
#else
arguments << "-c" << "1" << address;
#endif
#endif
process.start(program, arguments);
if (!process.waitForStarted()) {
......@@ -425,11 +486,28 @@ inline bool pingAddress(const QString &address) {
QString output(process.readAllStandardOutput());
// simple ping-success check
#ifdef Q_OS_WIN
#ifdef Q_OS_WIN
return output.contains("TTL=");
#else
#else
return output.contains("1 packets transmitted, 1 received");
#endif
#endif
}
inline QString findReachableIp() {
QList<QHostAddress> ipAddressesList = QNetworkInterface::allAddresses();
for (const QHostAddress &address : ipAddressesList) {
if (address.protocol() == QAbstractSocket::IPv4Protocol && !address.isLoopback()) {
QString ipAddress = address.toString();
QString currentSubnet = ipAddress.left(ipAddress.lastIndexOf('.') + 1); // keep the subnet prefix
for (int i = 254; i >= 1; --i) { // count down from 254
QString ip = currentSubnet + QString::number(i);
if (!pingAddress(ip)) {
return ip;
}
}
}
}
return QString(); // return an empty string if no usable IP address was found
}
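A short usage sketch (names are placeholders): despite its name, findReachableIp walks the local /24 from .254 downward and returns the first address that does not answer a ping, i.e. one presumed free to assign; it returns an empty string when every probed address responded.
// Sketch: pick a candidate address for a camera that still needs an IP.
QString candidateIp = findReachableIp();
if (candidateIp.isEmpty()) {
    qInfo() << "no unused IPv4 address found on the local subnet";
}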
inline int GetCpuIdByAsm_arm(char* cpu_id)
......
......@@ -13,6 +13,8 @@ TEMPLATE = app
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
DEFINES += APP_VERSION=\\\"1.3.2\\\"
DEFINES += QT_MESSAGELOGCONTEXT
DEFINES += QT_NO_DEBUG_OUTPUT
QMAKE_LIBDIR += /usr/local/lib
......@@ -24,6 +26,9 @@ INCLUDEPATH+=/usr/local/include/human
INCLUDEPATH+=/usr/local/include/CImg
INCLUDEPATH+=/usr/local/include/mqtt
# disable all warnings
QMAKE_CXXFLAGS += -w
......@@ -103,7 +108,8 @@ SOURCES += \
FaceReconitionHandle.cpp \
AlgorithmTaskManage.cpp \
BaseAlgorithm.cpp \
MqttSubscriber.cpp
MqttSubscriber.cpp \
NonConnectedCameraHandle.cpp
HEADERS += \
Common.h \
......@@ -123,7 +129,8 @@ HEADERS += \
FaceReconitionHandle.h \
AlgorithmTaskManage.h \
BaseAlgorithm.h \
MqttSubscriber.h
MqttSubscriber.h \
NonConnectedCameraHandle.h
# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
......
......@@ -33,7 +33,7 @@ public:
explicit MainWindow();
void initCommon();
void setVideoPath(int flag, const QString& path);
void createDirectory(int flag,const QString& dirName, const QString& successMsg, const QString& failureMsg);
......@@ -43,15 +43,17 @@ public:
void initFaceFaceRecognition();
void initCameras(vides_data::cameraParameters &parameter, vides_data::responseConfig &devConfig, const std::list<vides_data::responseArea>&areas,std::list<vides_data::requestCameraInfo>&camera_info_list);
__uint8_t intToUint8t(bool faceAlgorithm,bool licensePlateAlgorithm,bool uniformAlgorithm,bool humanAlgorithm);
// update the box parameters
void divParameterUpdate(vides_data::responseConfig &cloudConfig,QString &httpUrl,QString &serialNumber );
// update the box parameters
void divParameterUpdate(vides_data::responseConfig &cloudConfig,QString &httpUrl,QString &serialNumber );
static MainWindow * sp_this;
CameraHandle* findHandle(QString sn);
void modifySnMapIp(QString &sn,QString &ip);
void findSnMapIp(QString &sn,QString &ip);
void sendJsonResponse(QTcpSocket* socket, int code, const QString& data, const QString& msg);
void sendEmptyResponse(QTcpSocket* socket);
......@@ -81,8 +83,10 @@ public:
// filter function
void deleteCloudNotCamer (const std::map<QString,vides_data::localDeviceStatus*>& localDevices,
const std::list<vides_data::responseDeviceStatus>& devices);
void setIsResetting(bool running);
~MainWindow();
signals:
void shutdownSignals(QString sDevId, int nDevPort);
......@@ -101,7 +105,7 @@ private:
QSettings *qSetting;
QTimer *deleteLogFileTimer;
QTimer*dePermissionSynTimer;
QTcpServer server;
......@@ -118,9 +122,10 @@ private:
std::map<QString,CameraHandle*>faceDetectionParkingPushs;
vides_data::responseConfig config;
vides_data::MqttConfig mqttConfig;
vides_data::MqttConfig mqttConfig;
std::atomic<bool> isResetting;
};
#endif // MAINWINDOW_H