Commit 476b9ed9 by “liusq”

增加析构同时添加关闭状态

parent 784d6dae
......@@ -39,8 +39,10 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch
}
CameraHandle::~CameraHandle() {
stopRequested_=true;
Common & instace= Common::getInstance();
dev_snap_syn_timer->stop();
QThreadPool::globalInstance()->waitForDone();
instace.deleteObj(dev_snap_syn_timer);
instace.deleteObj(loginParam);
instace.deleteObj(sxMediaFaceImageReq);
......@@ -52,7 +54,7 @@ CameraHandle::~CameraHandle() {
instace.deleteObj( iter->second);
}
parkMap.clear();
QThreadPool::globalInstance()->waitForDone();
}
......@@ -247,7 +249,6 @@ void CameraHandle::initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 fa
dev_snap_syn_timer->start(syn_timer);
}
void CameraHandle::sdkRealTimeDevSnapSyn(int hDevice) {
QThreadPool* threadPool = QThreadPool::globalInstance();
threadPool->setMaxThreadCount(8);
auto taskSyn = std::bind(&CameraHandle::sdkDevSnapSyn, this, hDevice, this->channel);
......@@ -325,6 +326,7 @@ void CameraHandle::sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
}
int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
if (stopRequested_) return -1;
if (!semaphore.tryAcquire()) {
qInfo() << "sdkDevSnapSyn:正在执行线程";
return -1;
......@@ -368,6 +370,7 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
}
void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
if (stopRequested_) return;
if(hDevice<=0){
qInfo() << "相机断线";
return;
......@@ -440,7 +443,7 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
//当前为空,立场
if(newInfo.getLicensePlate().length()<=0){
HumanDetection &humanDetection=HumanDetection::getInstance();
int car_size = humanDetection.findHuManCar(frame,1,detector);
int car_size = humanDetection.findHuManCar(frame,0x01,detector);
qDebug()<<sSn<<":"<<"当前车形数量:"<<car_size;
if(car_size<=0){
//出场
......@@ -491,18 +494,18 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
QByteArray imgs;
this->matToBase64(frame, imgs);
HttpService httpService(httpUrl);
int faSize =-1;
int faSize =0;
if ((algorithmPermissions & 0x01<<1) != 0) {
faSize=humanDetection.findHuManCar(frame,0,detector);
faSize=humanDetection.findHuManCar(frame,0x00,detector);
if(currentFace!=faSize){
if(currentFace!= faSize){
if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){
vides_data::response* resp=httpService.httpPostFacePopulation(imgs,faSize,sSn,currentTime);
if (resp->code!= 0) {
qInfo()<<"人数变化推送信息推送失败";
}
instace.deleteObj(resp);
currentFace=faSize;
currentFace = faSize;
}
}
}
......@@ -511,7 +514,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
if(uniforms>0 ){
if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){
httpService.setHttpUrl(httpUrl);
vides_data::response* resp=httpService.httpPostUniforms(imgs,faSize,sSn,currentTime);
vides_data::response* resp=httpService.httpPostUniforms(imgs,uniforms,sSn,currentTime);
if (resp->code!= 0) {
qInfo()<<"推送未穿工服人数失败";
}
......
......@@ -126,6 +126,7 @@ private :
int hDevice;
int channel;
QString httpUrl;
bool stopRequested_ = false;
SXSDKLoginParam *loginParam;
SXMediaFaceImageReq *sxMediaFaceImageReq;
......
......@@ -33,6 +33,8 @@ public:
cv::Mat loadImage(const QString &path);
cv::Mat loadImageFromByteStream(const QString& filePath);
void doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face);
void initSourceImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence);
......
......@@ -2,6 +2,9 @@
#include <QImage>
#include <QThread>
#include <iostream>
#define cimg_display 0
#include "CImg.h"
......@@ -20,31 +23,6 @@ FaceReconition::~FaceReconition(){
FaceReconition* FaceReconition::instance = nullptr;
//cv::Mat FaceReconition::loadImage(const QString &path) {
// // 尝试使用OpenCV直接加载图像
// std::string stdPath = path.toStdString(); // 将路径转换为std::string
// cv::Mat image = cv::imread(stdPath, cv::IMREAD_COLOR); // 尝试加载图像
// if (!image.empty()) {
// qDebug() << "图像以OpenCV成功加载。";
// return image;
// }
// // 使用OpenCV加载失败,尝试使用QImage
// qDebug() << "使用OpenCV加载图像失败,尝试QImage转换。";
// QImage qimg(path);
// if (qimg.isNull()) {
// qDebug() << "QImage也无法加载图像,检查文件路径或文件损坏。";
// return cv::Mat(); // 返回空的cv::Mat对象
// }
// // 转换QImage格式为RGB888
// QImage converted = qimg.convertToFormat(QImage::Format_RGB888);
// cv::Mat mat(converted.height(), converted.width(), CV_8UC3, const_cast<uchar*>(converted.bits()), static_cast<size_t>(converted.bytesPerLine()));
// // 不进行颜色转换,直接返回
// return mat;
//}
cv::Mat FaceReconition::loadImage(const QString &path) {
// 尝试使用OpenCV直接加载图像
std::string stdPath = path.toStdString();
......@@ -53,7 +31,7 @@ cv::Mat FaceReconition::loadImage(const QString &path) {
qDebug() << "图像以OpenCV成功加载。";
return image;
}
return loadImageFromByteStream(path);
}
......@@ -69,7 +47,7 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
QString bPath = QCoreApplication::applicationDirPath() + "/model_zip/T1_5";
#else
#error "不支持的架构"
#endif
QByteArray && bypath = bPath.toUtf8();
char* spath = bypath.data();
......@@ -87,7 +65,7 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
customIds.clear();
int i = 0;
qInfo()<< "加载图像size: "<<maps.size();
for (auto it = maps.begin(); it != maps.end(); ++it,++i) {
const QString& key = it->first;
QString& value = it->second;
......@@ -102,7 +80,7 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
imageData.width = image.cols;
imageData.rotation = VIEW_ROTATION_0;
imageData.format = FORMAT_BGR;
HImageHandle imageSteamHandle;
ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
this->configConfidence=confidence;
......@@ -111,25 +89,25 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
HF_MultipleFaceData multipleFaceData = {0};
HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
if (multipleFaceData.detectedNum <= 0) {
qInfo() << QString("initSourceImageMap:未检测到人脸: %1").arg(key);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
HF_FaceFeature feature = {0};
ret = HF_FaceFeatureExtract(ctxHandle, imageSteamHandle, multipleFaceData.tokens[0], &feature);
if (ret != HSUCCEED) {
qInfo() << QString("特征提取出错: %1").arg(ret);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
char* tagName = new char[key.size() + 1];
std::strcpy(tagName, key.toStdString().c_str());
HF_FaceFeatureIdentity identity = {0};
......@@ -137,16 +115,16 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
identity.customId = i;
customIds.push_back( identity.customId);
identity.tag = tagName;
ret = HF_FeaturesGroupInsertFeature(ctxHandle, identity);
if (ret != HSUCCEED) {
qInfo() << QString("插入失败: %1").arg(ret);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
delete[] tagName;
ret = HF_ReleaseImageStream(imageSteamHandle);
if (ret == HSUCCEED) {
imageSteamHandle = nullptr;
......@@ -165,9 +143,11 @@ int FaceReconition::featureRemove(){
}
}
}
cv::Mat FaceReconition::loadImageFromByteStream(const QString& filePath) {
try {
try {
// 使用 CImg 读取 JPEG 图像
QByteArray bPath =filePath.toUtf8();
const char* ctr=bPath.data();
......@@ -176,34 +156,29 @@ cv::Mat FaceReconition::loadImageFromByteStream(const QString& filePath) {
// 将 CImg 对象转换为 OpenCV 的 Mat 格式
int width = cimg_image.width();
int height = cimg_image.height();
int channels = cimg_image.spectrum(); // 图像通道数
cv::Mat mat(height, width, CV_8UC3);
// 创建一个对应的空 Mat 对象
cv::Mat opencv_image(height, width, channels == 1 ? CV_8UC1 : CV_8UC3);
// 复制图像数据到 Mat 对象
cimg_forXY(cimg_image, x, y) {
if (channels == 1) {
opencv_image.at<unsigned char>(y, x) = cimg_image(x, y, 0, 0);
} else {
// 注意 OpenCV 默认使用 BGR 而不是 RGB
cv::Vec3b& opencv_pixel = opencv_image.at<cv::Vec3b>(y, x);
opencv_pixel[2] = cimg_image(x, y, 0, 0); // Red
opencv_pixel[1] = cimg_image(x, y, 0, 1); // Green
opencv_pixel[0] = cimg_image(x, y, 0, 2); // Blue
}
// 注意OpenCV默认是BGR顺序
// CImg中像素的存取方式是 (x, y, z, c) 其中c是颜色通道
mat.at<cv::Vec3b>(y, x)[2] = cimg_image(x, y, 0, 0); // R
mat.at<cv::Vec3b>(y, x)[1] = cimg_image(x, y, 0, 1); // G
mat.at<cv::Vec3b>(y, x)[0] = cimg_image(x, y, 0, 2); // B
}
return opencv_image;
return mat;
} catch (const CImgException& e) {
qDebug() << "CImg Error: " << e.what();
return cv::Mat();;
return cv::Mat();;
} catch (const cv::Exception& e) {
qDebug() << "OpenCV Error: " << e.what();
return cv::Mat(); ;
}
return cv::Mat();
}
void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
//QReadLocker locker(&rwLock);
HResult ret;
......@@ -214,7 +189,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
imageData.width = source.cols;
imageData.rotation = VIEW_ROTATION_0;
imageData.format = FORMAT_BGR;
HImageHandle imageSteamHandle;
ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
if (ret != HSUCCEED) {
......@@ -223,12 +198,12 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
}
HF_MultipleFaceData multipleFaceData = {0};
HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
if (multipleFaceData.detectedNum <= 0) {
qDebug()<<QString("search 未检测到人脸");
return ;
}
std::vector<std::vector<float>> features;
// 被搜索的目标这边推荐使用拷贝式的接口来获取特征向量
HInt32 featureNum;
......@@ -257,7 +232,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
qInfo()<<QString("搜索失败: %1").arg(ret);
return ;
}
qDebug()<<QString("搜索置信度: %1").arg(confidence);
qDebug()<<QString("匹配到的tag: %1").arg(searchIdentity.tag);
qDebug()<<QString("匹配到的customId: %1").arg(searchIdentity.customId);
......@@ -272,7 +247,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
newface.height=multipleFaceData.rects[rect].height;
faces.push_back(newface);
}
rect++;
}
ret = HF_ReleaseImageStream(imageSteamHandle);
......
......@@ -3,6 +3,7 @@
vides_data::responseStsCredentials HttpService::stsCredentials;
// Default constructor: creates a service with no target URL configured.
// Callers are expected to supply one before issuing requests — presumably
// via setHttpUrl(), as the rest of this commit does; confirm against callers.
HttpService::HttpService() {
}
HttpService::HttpService(QString httpUrl) {
this->httpUrl=httpUrl;
......
......@@ -15,7 +15,7 @@ int HumanDetection::findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector
// 执行一帧目标检测
TCV_HumanDetectorProcessFrame(detector, stream);
int num=-1;
int num=0;
if(res==0x00 || res==0x02){
num= TCV_HumanDetectorGetNumOfHuman(detector);
if (num > 0 && res==0x02) {
......@@ -32,11 +32,11 @@ int HumanDetection::findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector
}
num=num_uniforms;
}
qDebug() << (res == 0 ? "Number of people detected:" : "Number of people with uniform == 0 detected:") << num;
qDebug() << (res == 0 ? "findHuManCar 检测到的人数:" : "findHuManCar 未穿工服的人数:") << num;
}else if (res==0x01) {
num=TCV_HumanDetectorGetNumOfCar(detector);
qDebug() << "Number of cars detected:" << num;
qDebug() << "findHuManCar 检测到的汽车数量:" << num;
}else {
qDebug() << "参数错误";
......
#include "ScopeSemaphoreExit.h"

// RAII scope guard: stores a callable and invokes it exactly once when the
// guard object is destroyed (used in this codebase to release a semaphore
// on scope exit). The file previously contained duplicate definitions of
// both special members, which is a redefinition error; keep exactly one of
// each.

// @param onExit  callback to run at destruction; moved into the member to
//                avoid copying the std::function's captured state.
ScopeSemaphoreExit::ScopeSemaphoreExit(std::function<void()> onExit)
    : onExit_(std::move(onExit)) {}

// Run the registered callback, if any. Destructors must not throw, so the
// callback is expected to be noexcept in practice — TODO confirm at call sites.
ScopeSemaphoreExit::~ScopeSemaphoreExit() {
    if (onExit_) onExit_();
}
......@@ -272,23 +272,63 @@ inline bool pingAddress(const QString &address) {
return output.contains("1 packets transmitted, 1 received");
}
inline int GetCpuIdByAsm_arm(char* cpu_id)
{
FILE *fp = fopen("/proc/cpuinfo", "r");
if(nullptr == fp)
{
qDebug()<<"failed to open cpuinfo";
return -1;
}
inline QString getCpuSerialNumber() {
QProcess process;
// 启动一个进程运行shell命令
process.start("sh", QStringList() << "-c" << "cat /proc/cpuinfo | grep Serial");
// 等待命令执行完成
process.waitForFinished();
// 读取命令的标准输出
QString output = process.readAllStandardOutput();
char cpuSerial[100] = {0};
while(!feof(fp))
{
memset(cpuSerial, 0, sizeof(cpuSerial));
fgets(cpuSerial, sizeof(cpuSerial) - 1, fp); // leave out \n
char* pch = strstr(cpuSerial,"Serial");
if (pch)
{
char* pch2 = strchr(cpuSerial, ':');
if (pch2)
{
memmove(cpu_id, pch2 + 2, strlen(cpuSerial));
QString serialNumber;
if (!output.isEmpty()) {
// 已经确保了输出仅包含 Serial 行,所以直接分割并提取
serialNumber = output.split(":").last().trimmed();
break;
}
else
{
fclose(fp);
return -1;
}
}
}
fclose(fp);
return 0;
}
return serialNumber;
// Reads the CPU serial number by scanning /proc/cpuinfo for the first line
// that begins with "Serial". Returns the trimmed text found after the first
// ':' on that line, or a null QString when the file cannot be opened or no
// usable "Serial" entry is present.
inline QString getCpuSerialNumber() {
    QFile cpuInfo("/proc/cpuinfo");
    if (!cpuInfo.open(QIODevice::ReadOnly | QIODevice::Text)) {
        qDebug() << "无法打开 /proc/cpuinfo 文件";
        return QString();
    }

    QTextStream reader(&cpuInfo);
    while (!reader.atEnd()) {
        const QString row = reader.readLine();
        if (!row.startsWith("Serial"))
            continue;
        const QStringList fields = row.split(":");
        if (fields.size() > 1)
            return fields.at(1).trimmed();
        // A "Serial" line without a ':' payload — keep scanning, matching
        // the original behavior.
    }
    return QString();  // QFile closes itself on destruction
}
}
......
......@@ -11,46 +11,46 @@ TEMPLATE = app
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
DEFINES += APP_VERSION=\\\"1.0.0\\\"
#QMAKE_LIBDIR += /usr/local/lib
#INCLUDEPATH+=/usr/local/include/opencv4
#INCLUDEPATH+=/usr/local/include/hyperface
#INCLUDEPATH+=/usr/local/include/hyper
#INCLUDEPATH+=/usr/local/include/XNetSDK
#INCLUDEPATH+=/usr/local/include/human
#INCLUDEPATH+=/usr/local/include/CImg
unix:contains(QMAKE_HOST.arch, x86_64) {
QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
}
unix:contains(QMAKE_HOST.arch, arm) {
QMAKE_LIBDIR += /usr/local/lib
}
# 根据编译器类型选择库路径和头文件路径
unix: {
# x86 架构
contains(QMAKE_HOST.arch, x86_64) {
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/CImg
}
# ARM 架构
contains(QMAKE_HOST.arch, arm) {
INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface
INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK
INCLUDEPATH+=/usr/local/include/human
}
}
DEFINES += APP_VERSION=\\\"1.0.1\\\"
QMAKE_LIBDIR += /usr/local/lib
INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface
INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK
INCLUDEPATH+=/usr/local/include/human
INCLUDEPATH+=/usr/local/include/CImg
#unix:contains(QMAKE_HOST.arch, x86_64) {
# QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
#}
#unix:contains(QMAKE_HOST.arch, arm) {
# QMAKE_LIBDIR += /usr/local/lib
#}
## 根据编译器类型选择库路径和头文件路径
#unix: {
# # x86 架构
# contains(QMAKE_HOST.arch, x86_64) {
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/CImg
# }
# # ARM 架构
# contains(QMAKE_HOST.arch, arm) {
# INCLUDEPATH+=/usr/local/include/opencv4
# INCLUDEPATH+=/usr/local/include/hyperface
# INCLUDEPATH+=/usr/local/include/hyper
# INCLUDEPATH+=/usr/local/include/XNetSDK
# INCLUDEPATH+=/usr/local/include/human
# }
#}
# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
......@@ -76,7 +76,7 @@ LIBS += -lopencv_core \
-lopencv_objdetect \
-lsohuman \
# -lssl \
# -lcrypto \
# -lcrypto \ sudo apt-get install libjpeg-dev libpng-dev
-lc \
-lXNetSDK
#-lz
......
......@@ -284,7 +284,17 @@ void MainWindow::findLocalSerialNumber(QString &serialNumber){
if(localSn.length()>0){
serialNumber=localSn;
}else {
serialNumber =vides_data::getCpuSerialNumber();
QString number= vides_data::getCpuSerialNumber();
if(number.length()<=0){
char cpu_id_arm[40] = {0};
int result = vides_data::GetCpuIdByAsm_arm(cpu_id_arm); //EC-06-08-00-FF-FB-8B-1F
if (result>= 0)
{
number=cpu_id_arm;
}
}
serialNumber =number.trimmed();
localSn=serialNumber;
}
}
......@@ -295,7 +305,7 @@ void MainWindow::clearHandle(QString sDevId, int nDevPort){
auto it = this->faceDetectionParkingPushs.find(key);
if (it != this->faceDetectionParkingPushs.end()) {
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
qDebug()<<"clearHandle:离线的设备是:"<<key;
CameraHandle* offlineCameraHandle = it->second; // 注意使用->second获取值
int hDevice=offlineCameraHandle->getHdevice();
......@@ -313,14 +323,14 @@ void MainWindow::startCamera(const QString &httpurl){
Common & instace= Common::getInstance();
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
QString nonConstHttpUrl = std::remove_const<QString>::type(httpurl);
vides_data::responseDeviceData devices;
// QString serialNumber = QSysInfo::machineUniqueId();
QString serialNumber;
findLocalSerialNumber(serialNumber);
qInfo()<<"serialNumber==:"<<serialNumber;
vides_data::requestDeviceStatus reStatus;
reStatus.sSn=serialNumber;
reStatus.status=1;
......@@ -348,7 +358,7 @@ void MainWindow::startCamera(const QString &httpurl){
QString key = ipAddress + ":" + QString::number(localDevice->TCPPort);
if(faceDetectionParkingPushs.count(key)<=0){
httpService.setHttpUrl(httpurl);
vides_data::cameraParameters parameter;
parameter.sDevId=ipAddress;
parameter.nDevPort=localDevice->TCPPort;
......@@ -382,12 +392,12 @@ void MainWindow::startCamera(const QString &httpurl){
}
}
}
}
}
}
this->deleteCloudNotCamer(localDevices, devices.list);
for (auto& pair : localDevices) {
if (pair.second != nullptr) { // 如果对象未被删除(即不为nullptr)
instace.deleteObj(pair.second);
......@@ -403,10 +413,10 @@ void MainWindow::startCamera(const QString &httpurl){
}
instace.deleteObj(res);
updateLocalFace(httpurl);
instace.deleteObj(re);
}
bool MainWindow::isDeviceInList(const QString& deviceId, const std::list<vides_data::responseDeviceStatus>& devices) {
......@@ -447,7 +457,7 @@ void MainWindow::initDevConfigSyn(CameraHandle *cameraHandle){
void MainWindow::iniEncodeToString(QString &enCodeJson) {
// 创建 JSON 对象
QJsonObject rootObject;
// 添加 ExtraFormat 到 JSON 对象中
QJsonObject extraFormatObject;
QJsonObject videoObjectExtra = {
......@@ -464,7 +474,7 @@ void MainWindow::iniEncodeToString(QString &enCodeJson) {
extraFormatObject["AudioEnable"] = qSetting->value("ExtraFormat/AudioEnable").toBool();
extraFormatObject["Video"] = videoObjectExtra;
rootObject["ExtraFormat"] = extraFormatObject;
// 添加 MainFormat 到 JSON 对象中
QJsonObject mainFormatObject;
QJsonObject videoObjectMain = {
......@@ -483,7 +493,7 @@ void MainWindow::iniEncodeToString(QString &enCodeJson) {
rootObject["MainFormat"] = mainFormatObject;
QJsonArray jsonArray;
jsonArray.append(rootObject);
// 将 JSON 对象转换为 JSON 文档
QJsonDocument jsonDocument(jsonArray);
enCodeJson = QString::fromUtf8(jsonDocument.toJson());
......@@ -493,7 +503,7 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
qDebug()<<"iniWorkSpVMn=="<<sn;
QString jsonfile=QCoreApplication::applicationDirPath()+"/camera_config.json";
bool isEqual=true;
// 读取 JSON 配置文件
QFile file(jsonfile);
if (!file.open(QIODevice::ReadOnly)) {
......@@ -501,13 +511,13 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
return isEqual;
}
QJsonObject toJsonObject;
// 解析 JSON 数据
QByteArray jsonData = file.readAll();
file.close();
QJsonDocument jsonDoc = QJsonDocument::fromJson(jsonData);
QJsonObject rootObj = jsonDoc.object();
// 获取 cameraconfigs 对象
QJsonArray cameraConfigs = rootObj.value("cameraconfigs").toArray();
bool found = false;
......@@ -552,12 +562,12 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
toJsonObject["iHsIntervalTime"]=gb28181->heartbeat_interval;
toJsonObject["szConnPass"]=gb28181->password;
toJsonObject["szDeviceNO"]=gb28181->device_id;
QVariantList variantList;
for (int i = 0; i < 64; ++i) {
variantList.append(QVariant(0));
}
QJsonArray levelArray = QJsonArray::fromVariantList(variantList);
toJsonObject["AlarmLevel"]=levelArray;
QStringList alarmidStrings;
......@@ -570,9 +580,9 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
for (int i = 0; i < 64; ++i) {
variantListLevel.append(QVariant(0));
}
QJsonArray camreaLevelArray = QJsonArray::fromVariantList(variantListLevel);
toJsonObject["CamreaLevel"]=camreaLevelArray;
QStringList camreaidStrings;
for (int i = 1; i <= 64; ++i) {
......@@ -610,7 +620,7 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
newValue["iHsIntervalTime"]=gb28181->heartbeat_interval;
newValue["szConnPass"]=gb28181->password;
newValue["szDeviceNO"]=gb28181->device_id;
toJsonObject["szCsIP"]=gb28181->sip_ip ;
toJsonObject["szServerNo"]=gb28181->serial;
toJsonObject["sCsPort"]=gb28181->sip_port;
......@@ -619,14 +629,14 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
toJsonObject["iHsIntervalTime"]=gb28181->heartbeat_interval;
toJsonObject["szConnPass"]=gb28181->password;
toJsonObject["szDeviceNO"]=gb28181->device_id;
QVariantList variantList;
for (int i = 0; i < 64; ++i) {
variantList.append(0);
}
QJsonArray levelArray = QJsonArray::fromVariantList(variantList);
toJsonObject["AlarmLevel"]=levelArray;
QStringList alarmidStrings;
for (int i = 1; i <= 64; ++i) {
......@@ -675,14 +685,14 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
QJsonDocument saveDoc(rootObj);
file.write(saveDoc.toJson());
file.close();
}
return isEqual;
}
void MainWindow::iniRecordingToString(QString &recorJson){
QJsonObject jsonObject;
// 读取 Mask 数据
QJsonArray maskArray;
// 遍历所有掩码
......@@ -691,27 +701,27 @@ void MainWindow::iniRecordingToString(QString &recorJson){
// 读取掩码值。存储为QStringList,就像在ini文件中定义的一样
QStringList maskValues = qSetting->value(maskKey).toStringList();
QJsonArray maskSubArray;
foreach (const QString &value, maskValues) {
maskSubArray.append(value.trimmed());
}
maskArray.append(maskSubArray);
}
jsonObject["Mask"] = maskArray;
// 读取 Packet 数据
jsonObject["PacketLength"] =qSetting->value("Packet/PacketLength").toInt();
jsonObject["PreRecord"] = qSetting->value("Packet/PreRecord").toInt();
jsonObject["RecordMode"] = qSetting->value("Packet/RecordMode").toString();
jsonObject["Redundancy"] = qSetting->value("Packet/Redundancy").toBool();
// 读取 TimeSection 数据
QJsonArray timeArray;
for (int ts = 1; ts <= 7; ts++) {
QString tsKey = QString("TimeSection/TimeSection_%1").arg(ts);
// 读取掩码值。存储为QStringList,就像在ini文件中定义的一样
QStringList tsValues = qSetting->value(tsKey).toStringList();
QJsonArray timeSubArray;
......@@ -741,9 +751,9 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
float carConfidence=qSetting->value("devices/carConfidence").toFloat();
int image_save=qSetting->value("devices/image_save").toInt();
CameraHandle * cameraHandle =new CameraHandle(parameter.sDevId,parameter.httpUrl,parameter.sSn,parameter.channel,modelPaths,carConfidence,image_save);
int sdk_handle=cameraHandle->sdkDevLoginSyn(parameter.sDevId,parameter.nDevPort,parameter.sUserName,parameter.sPassword,10000);
qDebug()<<"句柄为2:"<<sdk_handle;
if(sdk_handle<=0){
......@@ -752,11 +762,11 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
}
initDevConfigSyn(cameraHandle);
mediaFaceImage->setMap(sdk_handle,cameraHandle);
cameraHandle->sdkDevSetAlarmListener(sdk_handle,1);
int synTime=qSetting->value("timer/dev_snap_syn_timer").toInt();
uint64 face_frequency=qSetting->value("devices/face_frequency").toULongLong();
cameraHandle->initSdkRealTimeDevSnapSyn(sdk_handle,synTime,face_frequency);
vides_data::requestCameraInfo camera_info;
camera_info.sSn=parameter.sSn;
......@@ -769,7 +779,7 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
cameraHandle->initParkingSpaceInfo(areas);
Common & instace= Common::getInstance();
QString key =parameter.sDevId + ":" + QString::number(parameter.nDevPort);
faceDetectionParkingPushs[key]= cameraHandle;
HttpService httpService(parameter.httpUrl);
......@@ -842,15 +852,15 @@ MainWindow::~MainWindow()
instace.deleteObj(deleteLogFileTimer);
instace.deleteObj(deleteFrameFileTimer);
instace.deleteObj(dePermissionSynTimer);
for(auto iter = faceDetectionParkingPushs.begin(); iter != faceDetectionParkingPushs.end(); ++iter) {
instace.deleteObj( iter->second);
}
// 清空 handleMap
faceDetectionParkingPushs.clear();
LogHandler::Get().uninstallMessageHandler();
}
void MainWindow::deleteMkvFileTimer(){
......@@ -869,7 +879,7 @@ void MainWindow::deleteMkvFileTimer(){
}
}
}
}
void MainWindow::deleteLogFile(){
......@@ -878,20 +888,20 @@ void MainWindow::deleteLogFile(){
// 前7天
QDateTime dateTime1 = now.addDays(-7);
QDateTime dateTime2;
QString logPath = logDir.absoluteFilePath(""); // 日志的路径
QDir dir(logPath);
QStringList filename ;
filename << "*.log";//可叠加,可使用通配符筛选
QFileInfoList fileList = dir.entryInfoList(filename);
foreach (QFileInfo f, fileList) {
// "."和".."跳过
if (f.baseName() == "" || f.baseName()=="today" )
continue;
dateTime2 = QDateTime::fromString(f.baseName(), "yyyy-MM-dd");
if (dateTime2 < dateTime1) { // 只要日志时间小于前7天的时间就删除
dir.remove(f.absoluteFilePath());
......@@ -900,11 +910,11 @@ void MainWindow::deleteLogFile(){
}
void MainWindow::initFaceFaceRecognition() {
qSetting->beginGroup("cloudImageMap");
QStringList keys = qSetting->childKeys();
foreach(QString key, keys) {
QString value = qSetting->value(key).toString();
cloudImageMap[key]=value;
......@@ -912,7 +922,7 @@ void MainWindow::initFaceFaceRecognition() {
qSetting->endGroup();
qSetting->beginGroup("localImageMap");
QStringList lokeys = qSetting->childKeys();
foreach(QString lk, lokeys) {
// 获取键对应的值
QString value = qSetting->value(lk).toString();
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment