Commit 476b9ed9 by “liusq”

Add a stop/closed-state flag and tear-down handling in the destructor

parent 784d6dae
@@ -39,8 +39,10 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch
 }
 CameraHandle::~CameraHandle() {
+    stopRequested_=true;
     Common & instace= Common::getInstance();
     dev_snap_syn_timer->stop();
+    QThreadPool::globalInstance()->waitForDone();
     instace.deleteObj(dev_snap_syn_timer);
     instace.deleteObj(loginParam);
     instace.deleteObj(sxMediaFaceImageReq);
@@ -52,7 +54,7 @@ CameraHandle::~CameraHandle() {
         instace.deleteObj( iter->second);
     }
     parkMap.clear();
-    QThreadPool::globalInstance()->waitForDone();
 }
@@ -247,7 +249,6 @@ void CameraHandle::initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 fa
     dev_snap_syn_timer->start(syn_timer);
 }
 void CameraHandle::sdkRealTimeDevSnapSyn(int hDevice) {
     QThreadPool* threadPool = QThreadPool::globalInstance();
     threadPool->setMaxThreadCount(8);
     auto taskSyn = std::bind(&CameraHandle::sdkDevSnapSyn, this, hDevice, this->channel);
@@ -325,6 +326,7 @@ void CameraHandle::sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
 }
 int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
+    if (stopRequested_) return -1;
     if (!semaphore.tryAcquire()) {
         qInfo() << "sdkDevSnapSyn:正在执行线程";
         return -1;
@@ -368,6 +370,7 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
 }
 void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
+    if (stopRequested_) return;
     if(hDevice<=0){
         qInfo() << "相机断线";
         return;
@@ -440,7 +443,7 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
     //当前为空,立场
     if(newInfo.getLicensePlate().length()<=0){
         HumanDetection &humanDetection=HumanDetection::getInstance();
-        int car_size = humanDetection.findHuManCar(frame,1,detector);
+        int car_size = humanDetection.findHuManCar(frame,0x01,detector);
         qDebug()<<sSn<<":"<<"当前车形数量:"<<car_size;
         if(car_size<=0){
             //出场
@@ -491,18 +494,18 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
     QByteArray imgs;
     this->matToBase64(frame, imgs);
     HttpService httpService(httpUrl);
-    int faSize =-1;
+    int faSize =0;
     if ((algorithmPermissions & 0x01<<1) != 0) {
-        faSize=humanDetection.findHuManCar(frame,0,detector);
-        if(currentFace!=faSize){
+        faSize=humanDetection.findHuManCar(frame,0x00,detector);
+        if(currentFace!= faSize){
             if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){
                 vides_data::response* resp=httpService.httpPostFacePopulation(imgs,faSize,sSn,currentTime);
                 if (resp->code!= 0) {
                     qInfo()<<"人数变化推送信息推送失败";
                 }
                 instace.deleteObj(resp);
-                currentFace=faSize;
+                currentFace = faSize;
             }
         }
     }
@@ -511,7 +514,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
     if(uniforms>0 ){
         if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){
             httpService.setHttpUrl(httpUrl);
-            vides_data::response* resp=httpService.httpPostUniforms(imgs,faSize,sSn,currentTime);
+            vides_data::response* resp=httpService.httpPostUniforms(imgs,uniforms,sSn,currentTime);
             if (resp->code!= 0) {
                 qInfo()<<"推送未穿工服人数失败";
             }
......
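The ordering change above (set the flag, stop the timer, drain the pool, then free members) is the core of this commit. Below is a minimal standalone sketch of that shutdown pattern with a hypothetical Worker class, assuming Qt 5.15+ where QThreadPool::start() accepts a callable; it uses std::atomic<bool>, whereas the commit declares stopRequested_ as a plain bool.

// Minimal sketch of the stop-flag + drain-before-delete shutdown pattern (hypothetical Worker).
// Assumes Qt >= 5.15, where QThreadPool::start() accepts a std::function-style callable.
#include <QThreadPool>
#include <QDebug>
#include <atomic>

class Worker {
public:
    void submitSnapshot() {
        if (stopRequested_.load()) return;            // refuse new work once stopping
        QThreadPool::globalInstance()->start([this] {
            if (stopRequested_.load()) return;        // re-check inside the task
            qDebug() << "snapshot task running";
        });
    }

    ~Worker() {
        stopRequested_.store(true);                   // 1. signal shutdown
        QThreadPool::globalInstance()->waitForDone(); // 2. wait for in-flight tasks
        // 3. only now free members that running tasks may still reference
    }

private:
    std::atomic<bool> stopRequested_{false};
};

Draining the pool before the deleteObj calls matters because a task still running in the pool may dereference members the destructor is about to free; making the flag atomic additionally removes the formal data race of reading a plain bool from pool threads.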
@@ -126,6 +126,7 @@ private :
     int hDevice;
     int channel;
     QString httpUrl;
+    bool stopRequested_ = false;
     SXSDKLoginParam *loginParam;
     SXMediaFaceImageReq *sxMediaFaceImageReq;
......
@@ -33,6 +33,8 @@ public:
     cv::Mat loadImage(const QString &path);
     cv::Mat loadImageFromByteStream(const QString& filePath);
     void doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face);
     void initSourceImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence);
......
@@ -2,6 +2,9 @@
 #include <QImage>
 #include <QThread>
+#include <iostream>
 #define cimg_display 0
 #include "CImg.h"
@@ -20,31 +23,6 @@ FaceReconition::~FaceReconition(){
 FaceReconition* FaceReconition::instance = nullptr;
-//cv::Mat FaceReconition::loadImage(const QString &path) {
-//    // 尝试使用OpenCV直接加载图像
-//    std::string stdPath = path.toStdString(); // 将路径转换为std::string
-//    cv::Mat image = cv::imread(stdPath, cv::IMREAD_COLOR); // 尝试加载图像
-//    if (!image.empty()) {
-//        qDebug() << "图像以OpenCV成功加载。";
-//        return image;
-//    }
-//    // 使用OpenCV加载失败,尝试使用QImage
-//    qDebug() << "使用OpenCV加载图像失败,尝试QImage转换。";
-//    QImage qimg(path);
-//    if (qimg.isNull()) {
-//        qDebug() << "QImage也无法加载图像,检查文件路径或文件损坏。";
-//        return cv::Mat(); // 返回空的cv::Mat对象
-//    }
-//    // 转换QImage格式为RGB888
-//    QImage converted = qimg.convertToFormat(QImage::Format_RGB888);
-//    cv::Mat mat(converted.height(), converted.width(), CV_8UC3, const_cast<uchar*>(converted.bits()), static_cast<size_t>(converted.bytesPerLine()));
-//    // 不进行颜色转换,直接返回
-//    return mat;
-//}
 cv::Mat FaceReconition::loadImage(const QString &path) {
     // 尝试使用OpenCV直接加载图像
     std::string stdPath = path.toStdString();
@@ -53,7 +31,7 @@ cv::Mat FaceReconition::loadImage(const QString &path) {
         qDebug() << "图像以OpenCV成功加载。";
         return image;
     }
     return loadImageFromByteStream(path);
 }
@@ -69,7 +47,7 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
     QString bPath = QCoreApplication::applicationDirPath() + "/model_zip/T1_5";
 #else
 #error "不支持的架构"
 #endif
     QByteArray && bypath = bPath.toUtf8();
     char* spath = bypath.data();
@@ -87,7 +65,7 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
     customIds.clear();
     int i = 0;
     qInfo()<< "加载图像size: "<<maps.size();
     for (auto it = maps.begin(); it != maps.end(); ++it,++i) {
         const QString& key = it->first;
         QString& value = it->second;
@@ -102,7 +80,7 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
         imageData.width = image.cols;
         imageData.rotation = VIEW_ROTATION_0;
         imageData.format = FORMAT_BGR;
         HImageHandle imageSteamHandle;
         ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
         this->configConfidence=confidence;
@@ -111,25 +89,25 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
             HF_ReleaseImageStream(imageSteamHandle); // 释放资源
             return;
         }
         HF_MultipleFaceData multipleFaceData = {0};
         HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
         if (multipleFaceData.detectedNum <= 0) {
             qInfo() << QString("initSourceImageMap:未检测到人脸: %1").arg(key);
             HF_ReleaseImageStream(imageSteamHandle); // 释放资源
             return;
         }
         HF_FaceFeature feature = {0};
         ret = HF_FaceFeatureExtract(ctxHandle, imageSteamHandle, multipleFaceData.tokens[0], &feature);
         if (ret != HSUCCEED) {
             qInfo() << QString("特征提取出错: %1").arg(ret);
             HF_ReleaseImageStream(imageSteamHandle); // 释放资源
             return;
         }
         char* tagName = new char[key.size() + 1];
         std::strcpy(tagName, key.toStdString().c_str());
         HF_FaceFeatureIdentity identity = {0};
@@ -137,16 +115,16 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
         identity.customId = i;
         customIds.push_back( identity.customId);
         identity.tag = tagName;
         ret = HF_FeaturesGroupInsertFeature(ctxHandle, identity);
         if (ret != HSUCCEED) {
             qInfo() << QString("插入失败: %1").arg(ret);
             HF_ReleaseImageStream(imageSteamHandle); // 释放资源
             return;
         }
         delete[] tagName;
         ret = HF_ReleaseImageStream(imageSteamHandle);
         if (ret == HSUCCEED) {
             imageSteamHandle = nullptr;
@@ -165,9 +143,11 @@ int FaceReconition::featureRemove(){
         }
     }
 }
 cv::Mat FaceReconition::loadImageFromByteStream(const QString& filePath) {
     try {
         // 使用 CImg 读取 JPEG 图像
         QByteArray bPath =filePath.toUtf8();
         const char* ctr=bPath.data();
@@ -176,34 +156,29 @@ cv::Mat FaceReconition::loadImageFromByteStream(const QString& filePath) {
         // 将 CImg 对象转换为 OpenCV 的 Mat 格式
         int width = cimg_image.width();
         int height = cimg_image.height();
-        int channels = cimg_image.spectrum(); // 图像通道数
-        // 创建一个对应的空 Mat 对象
-        cv::Mat opencv_image(height, width, channels == 1 ? CV_8UC1 : CV_8UC3);
-        // 复制图像数据到 Mat 对象
+        cv::Mat mat(height, width, CV_8UC3);
         cimg_forXY(cimg_image, x, y) {
-            if (channels == 1) {
-                opencv_image.at<unsigned char>(y, x) = cimg_image(x, y, 0, 0);
-            } else {
-                // 注意 OpenCV 默认使用 BGR 而不是 RGB
-                cv::Vec3b& opencv_pixel = opencv_image.at<cv::Vec3b>(y, x);
-                opencv_pixel[2] = cimg_image(x, y, 0, 0); // Red
-                opencv_pixel[1] = cimg_image(x, y, 0, 1); // Green
-                opencv_pixel[0] = cimg_image(x, y, 0, 2); // Blue
-            }
+            // 注意OpenCV默认是BGR顺序
+            // CImg中像素的存取方式是 (x, y, z, c) 其中c是颜色通道
+            mat.at<cv::Vec3b>(y, x)[2] = cimg_image(x, y, 0, 0); // R
+            mat.at<cv::Vec3b>(y, x)[1] = cimg_image(x, y, 0, 1); // G
+            mat.at<cv::Vec3b>(y, x)[0] = cimg_image(x, y, 0, 2); // B
         }
-        return opencv_image;
+        return mat;
     } catch (const CImgException& e) {
         qDebug() << "CImg Error: " << e.what();
         return cv::Mat();;
     } catch (const cv::Exception& e) {
         qDebug() << "OpenCV Error: " << e.what();
         return cv::Mat(); ;
     }
     return cv::Mat();
 }
 void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
     //QReadLocker locker(&rwLock);
     HResult ret;
@@ -214,7 +189,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
     imageData.width = source.cols;
     imageData.rotation = VIEW_ROTATION_0;
     imageData.format = FORMAT_BGR;
     HImageHandle imageSteamHandle;
     ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
     if (ret != HSUCCEED) {
@@ -223,12 +198,12 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
     }
     HF_MultipleFaceData multipleFaceData = {0};
     HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
     if (multipleFaceData.detectedNum <= 0) {
         qDebug()<<QString("search 未检测到人脸");
         return ;
     }
     std::vector<std::vector<float>> features;
     // 被搜索的目标这边推荐使用拷贝式的接口来获取特征向量
     HInt32 featureNum;
@@ -257,7 +232,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
         qInfo()<<QString("搜索失败: %1").arg(ret);
         return ;
     }
     qDebug()<<QString("搜索置信度: %1").arg(confidence);
     qDebug()<<QString("匹配到的tag: %1").arg(searchIdentity.tag);
     qDebug()<<QString("匹配到的customId: %1").arg(searchIdentity.customId);
@@ -272,7 +247,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
             newface.height=multipleFaceData.rects[rect].height;
             faces.push_back(newface);
         }
         rect++;
     }
     ret = HF_ReleaseImageStream(imageSteamHandle);
......
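The rewritten loadImageFromByteStream drops the grayscale branch and always produces a 3-channel BGR Mat. The conversion in isolation looks like the sketch below; the function name is illustrative, and it assumes a 3-channel CImg input, which is also what the new code assumes.

// Sketch: convert a 3-channel CImg<unsigned char> (planar RGB) into an interleaved BGR cv::Mat.
#define cimg_display 0
#include "CImg.h"
#include <opencv2/core.hpp>

cv::Mat cimgToBgrMat(const cimg_library::CImg<unsigned char>& img) {
    cv::Mat mat(img.height(), img.width(), CV_8UC3);
    cimg_forXY(img, x, y) {
        cv::Vec3b& px = mat.at<cv::Vec3b>(y, x);
        px[2] = img(x, y, 0, 0); // CImg channel 0 = R -> Mat channel 2
        px[1] = img(x, y, 0, 1); // CImg channel 1 = G -> Mat channel 1
        px[0] = img(x, y, 0, 2); // CImg channel 2 = B -> Mat channel 0
    }
    return mat;
}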
@@ -3,6 +3,7 @@
 vides_data::responseStsCredentials HttpService::stsCredentials;
 HttpService::HttpService() {
 }
 HttpService::HttpService(QString httpUrl) {
     this->httpUrl=httpUrl;
......
@@ -15,7 +15,7 @@ int HumanDetection::findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector
     // 执行一帧目标检测
     TCV_HumanDetectorProcessFrame(detector, stream);
-    int num=-1;
+    int num=0;
     if(res==0x00 || res==0x02){
         num= TCV_HumanDetectorGetNumOfHuman(detector);
         if (num > 0 && res==0x02) {
@@ -32,11 +32,11 @@ int HumanDetection::findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector
             }
             num=num_uniforms;
         }
-        qDebug() << (res == 0 ? "Number of people detected:" : "Number of people with uniform == 0 detected:") << num;
+        qDebug() << (res == 0 ? "findHuManCar 检测到的人数:" : "findHuManCar 未穿工服的人数:") << num;
     }else if (res==0x01) {
         num=TCV_HumanDetectorGetNumOfCar(detector);
-        qDebug() << "Number of cars detected:" << num;
+        qDebug() << "findHuManCar 检测到的汽车数量:" << num;
     }else {
         qDebug() << "参数错误";
......
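The res argument of findHuManCar is a mode selector, not a count: 0x00 counts people, 0x01 counts vehicles, 0x02 counts people not wearing a work uniform, and the commit switches call sites to the hex constants to make that explicit. An illustrative call-site sketch follows; it is templated on the detector pointer type because the parameter type is truncated in the hunk header above, and the header name is assumed.

// Illustrative call sites for the three findHuManCar modes (project header name assumed).
#include <opencv2/core.hpp>
#include <QDebug>
#include "HumanDetection.h" // assumed project header declaring HumanDetection

template <typename Detector>
void logDetectionCounts(const cv::Mat& frame, Detector* detector) {
    HumanDetection& hd = HumanDetection::getInstance();
    int people    = hd.findHuManCar(frame, 0x00, detector); // people in the frame
    int cars      = hd.findHuManCar(frame, 0x01, detector); // vehicles (used for exit detection)
    int noUniform = hd.findHuManCar(frame, 0x02, detector); // people without a work uniform
    qDebug() << "people:" << people << "cars:" << cars << "no uniform:" << noUniform;
}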
#include "ScopeSemaphoreExit.h" #include "ScopeSemaphoreExit.h"
ScopeSemaphoreExit::ScopeSemaphoreExit(std::function<void()> onExit) ScopeSemaphoreExit::ScopeSemaphoreExit(std::function<void()> onExit)
: onExit_(onExit) {} : onExit_(onExit) {}
ScopeSemaphoreExit::~ScopeSemaphoreExit() { ScopeSemaphoreExit::~ScopeSemaphoreExit() {
if (onExit_) onExit_(); if (onExit_) onExit_();
} }
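ScopeSemaphoreExit, shown above, is a small RAII guard that runs its callable when the scope unwinds; paired with QSemaphore::tryAcquire() it presumably backs the single-flight check in callbackFunction. A minimal usage sketch, with an illustrative semaphore and workload:

// Sketch: pair QSemaphore::tryAcquire() with a guaranteed release via the RAII guard above.
#include <QSemaphore>
#include <QDebug>
#include "ScopeSemaphoreExit.h"

static QSemaphore snapshotSlot(1); // single-flight guard, illustrative

int processSnapshot() {
    if (!snapshotSlot.tryAcquire()) {
        qInfo() << "previous snapshot still in flight, skipping";
        return -1;
    }
    // Runs on every exit path of this scope: normal return, early return, or exception.
    ScopeSemaphoreExit releaseGuard([] { snapshotSlot.release(); });

    // ... actual frame processing would go here ...
    return 0;
}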
@@ -272,23 +272,63 @@ inline bool pingAddress(const QString &address) {
     return output.contains("1 packets transmitted, 1 received");
 }
+inline int GetCpuIdByAsm_arm(char* cpu_id)
+{
+    FILE *fp = fopen("/proc/cpuinfo", "r");
+    if(nullptr == fp)
+    {
+        qDebug()<<"failed to open cpuinfo";
+        return -1;
+    }
+    char cpuSerial[100] = {0};
+    while(!feof(fp))
+    {
+        memset(cpuSerial, 0, sizeof(cpuSerial));
+        fgets(cpuSerial, sizeof(cpuSerial) - 1, fp); // leave out \n
+        char* pch = strstr(cpuSerial,"Serial");
+        if (pch)
+        {
+            char* pch2 = strchr(cpuSerial, ':');
+            if (pch2)
+            {
+                memmove(cpu_id, pch2 + 2, strlen(cpuSerial));
+                break;
+            }
+            else
+            {
+                fclose(fp);
+                return -1;
+            }
+        }
+    }
+    fclose(fp);
+    return 0;
+}
-inline QString getCpuSerialNumber() {
-    QProcess process;
-    // 启动一个进程运行shell命令
-    process.start("sh", QStringList() << "-c" << "cat /proc/cpuinfo | grep Serial");
-    // 等待命令执行完成
-    process.waitForFinished();
-    // 读取命令的标准输出
-    QString output = process.readAllStandardOutput();
-    QString serialNumber;
-    if (!output.isEmpty()) {
-        // 已经确保了输出仅包含 Serial 行,所以直接分割并提取
-        serialNumber = output.split(":").last().trimmed();
-    }
-    return serialNumber;
-}
+inline QString getCpuSerialNumber() {
+    QFile file("/proc/cpuinfo");
+    if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) {
+        qDebug() << "无法打开 /proc/cpuinfo 文件";
+        return QString();
+    }
+    QTextStream in(&file);
+    QString line;
+    while (!in.atEnd()) {
+        line = in.readLine();
+        if (line.startsWith("Serial")) {
+            QStringList parts = line.split(":");
+            if (parts.size() > 1) {
+                QString serial = parts.at(1).trimmed();
+                return serial;
+            }
+        }
+    }
+    return QString();
+}
 }
......
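Further down in this commit, MainWindow::findLocalSerialNumber chains these two helpers: it tries the QFile-based getCpuSerialNumber() first and only falls back to GetCpuIdByAsm_arm() when no Serial line could be parsed. A compact sketch of that fallback, using the vides_data functions added above (the wrapper name and header include are illustrative):

// Sketch: serial-number lookup with ARM fallback, mirroring the logic added to MainWindow.
// Include whichever project header declares the vides_data helpers.
#include <QString>

QString resolveDeviceSerial() {
    QString number = vides_data::getCpuSerialNumber();   // parse "Serial" via QFile/QTextStream
    if (number.isEmpty()) {
        char cpu_id[40] = {0};
        if (vides_data::GetCpuIdByAsm_arm(cpu_id) >= 0) { // raw fgets/strstr fallback
            number = QString::fromLatin1(cpu_id);
        }
    }
    return number.trimmed();
}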
@@ -11,46 +11,46 @@ TEMPLATE = app
 # depend on your compiler). Please consult the documentation of the
 # deprecated API in order to know how to port your code away from it.
 DEFINES += QT_DEPRECATED_WARNINGS
-DEFINES += APP_VERSION=\\\"1.0.0\\\"
+DEFINES += APP_VERSION=\\\"1.0.1\\\"
-#QMAKE_LIBDIR += /usr/local/lib
-#INCLUDEPATH+=/usr/local/include/opencv4
-#INCLUDEPATH+=/usr/local/include/hyperface
-#INCLUDEPATH+=/usr/local/include/hyper
-#INCLUDEPATH+=/usr/local/include/XNetSDK
-#INCLUDEPATH+=/usr/local/include/human
-#INCLUDEPATH+=/usr/local/include/CImg
+QMAKE_LIBDIR += /usr/local/lib
+INCLUDEPATH+=/usr/local/include/opencv4
+INCLUDEPATH+=/usr/local/include/hyperface
+INCLUDEPATH+=/usr/local/include/hyper
+INCLUDEPATH+=/usr/local/include/XNetSDK
+INCLUDEPATH+=/usr/local/include/human
+INCLUDEPATH+=/usr/local/include/CImg
-unix:contains(QMAKE_HOST.arch, x86_64) {
-    QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
-}
-unix:contains(QMAKE_HOST.arch, arm) {
-    QMAKE_LIBDIR += /usr/local/lib
-}
-# 根据编译器类型选择库路径和头文件路径
-unix: {
-    # x86 架构
-    contains(QMAKE_HOST.arch, x86_64) {
-        INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
-        INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
-        INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
-        INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
-        INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
-        INCLUDEPATH+=/home/mark/Public/x86_opencv/include/CImg
-    }
-    # ARM 架构
-    contains(QMAKE_HOST.arch, arm) {
-        INCLUDEPATH+=/usr/local/include/opencv4
-        INCLUDEPATH+=/usr/local/include/hyperface
-        INCLUDEPATH+=/usr/local/include/hyper
-        INCLUDEPATH+=/usr/local/include/XNetSDK
-        INCLUDEPATH+=/usr/local/include/human
-    }
-}
+#unix:contains(QMAKE_HOST.arch, x86_64) {
+#    QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
+#}
+#unix:contains(QMAKE_HOST.arch, arm) {
+#    QMAKE_LIBDIR += /usr/local/lib
+#}
+## 根据编译器类型选择库路径和头文件路径
+#unix: {
+#    # x86 架构
+#    contains(QMAKE_HOST.arch, x86_64) {
+#        INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
+#        INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
+#        INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
+#        INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
+#        INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
+#        INCLUDEPATH+=/home/mark/Public/x86_opencv/include/CImg
+#    }
+#    # ARM 架构
+#    contains(QMAKE_HOST.arch, arm) {
+#        INCLUDEPATH+=/usr/local/include/opencv4
+#        INCLUDEPATH+=/usr/local/include/hyperface
+#        INCLUDEPATH+=/usr/local/include/hyper
+#        INCLUDEPATH+=/usr/local/include/XNetSDK
+#        INCLUDEPATH+=/usr/local/include/human
+#    }
+#}
 # You can also make your code fail to compile if it uses deprecated APIs.
 # In order to do so, uncomment the following line.
@@ -76,7 +76,7 @@ LIBS += -lopencv_core \
     -lopencv_objdetect \
     -lsohuman \
 #    -lssl \
-#    -lcrypto \
+#    -lcrypto \ sudo apt-get install libjpeg-dev libpng-dev
     -lc \
     -lXNetSDK
 #-lz
......
@@ -284,7 +284,17 @@ void MainWindow::findLocalSerialNumber(QString &serialNumber){
     if(localSn.length()>0){
         serialNumber=localSn;
     }else {
-        serialNumber =vides_data::getCpuSerialNumber();
+        QString number= vides_data::getCpuSerialNumber();
+        if(number.length()<=0){
+            char cpu_id_arm[40] = {0};
+            int result = vides_data::GetCpuIdByAsm_arm(cpu_id_arm); //EC-06-08-00-FF-FB-8B-1F
+            if (result>= 0)
+            {
+                number=cpu_id_arm;
+            }
+        }
+        serialNumber =number.trimmed();
         localSn=serialNumber;
     }
 }
@@ -295,7 +305,7 @@ void MainWindow::clearHandle(QString sDevId, int nDevPort){
     auto it = this->faceDetectionParkingPushs.find(key);
     if (it != this->faceDetectionParkingPushs.end()) {
         MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
         qDebug()<<"clearHandle:离线的设备是:"<<key;
         CameraHandle* offlineCameraHandle = it->second; // 注意使用->second获取值
         int hDevice=offlineCameraHandle->getHdevice();
@@ -313,14 +323,14 @@ void MainWindow::startCamera(const QString &httpurl){
     Common & instace= Common::getInstance();
     MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
     QString nonConstHttpUrl = std::remove_const<QString>::type(httpurl);
     vides_data::responseDeviceData devices;
     // QString serialNumber = QSysInfo::machineUniqueId();
     QString serialNumber;
     findLocalSerialNumber(serialNumber);
     qInfo()<<"serialNumber==:"<<serialNumber;
     vides_data::requestDeviceStatus reStatus;
     reStatus.sSn=serialNumber;
     reStatus.status=1;
@@ -348,7 +358,7 @@ void MainWindow::startCamera(const QString &httpurl){
     QString key = ipAddress + ":" + QString::number(localDevice->TCPPort);
     if(faceDetectionParkingPushs.count(key)<=0){
         httpService.setHttpUrl(httpurl);
         vides_data::cameraParameters parameter;
         parameter.sDevId=ipAddress;
         parameter.nDevPort=localDevice->TCPPort;
@@ -382,12 +392,12 @@ void MainWindow::startCamera(const QString &httpurl){
                 }
             }
         }
     }
     }
     }
     this->deleteCloudNotCamer(localDevices, devices.list);
     for (auto& pair : localDevices) {
         if (pair.second != nullptr) { // 如果对象未被删除(即不为nullptr)
             instace.deleteObj(pair.second);
@@ -403,10 +413,10 @@ void MainWindow::startCamera(const QString &httpurl){
     }
     instace.deleteObj(res);
     updateLocalFace(httpurl);
     instace.deleteObj(re);
 }
 bool MainWindow::isDeviceInList(const QString& deviceId, const std::list<vides_data::responseDeviceStatus>& devices) {
@@ -447,7 +457,7 @@ void MainWindow::initDevConfigSyn(CameraHandle *cameraHandle){
 void MainWindow::iniEncodeToString(QString &enCodeJson) {
     // 创建 JSON 对象
     QJsonObject rootObject;
     // 添加 ExtraFormat 到 JSON 对象中
     QJsonObject extraFormatObject;
     QJsonObject videoObjectExtra = {
@@ -464,7 +474,7 @@ void MainWindow::iniEncodeToString(QString &enCodeJson) {
     extraFormatObject["AudioEnable"] = qSetting->value("ExtraFormat/AudioEnable").toBool();
     extraFormatObject["Video"] = videoObjectExtra;
     rootObject["ExtraFormat"] = extraFormatObject;
     // 添加 MainFormat 到 JSON 对象中
     QJsonObject mainFormatObject;
     QJsonObject videoObjectMain = {
@@ -483,7 +493,7 @@ void MainWindow::iniEncodeToString(QString &enCodeJson) {
     rootObject["MainFormat"] = mainFormatObject;
     QJsonArray jsonArray;
     jsonArray.append(rootObject);
     // 将 JSON 对象转换为 JSON 文档
     QJsonDocument jsonDocument(jsonArray);
     enCodeJson = QString::fromUtf8(jsonDocument.toJson());
@@ -493,7 +503,7 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
     qDebug()<<"iniWorkSpVMn=="<<sn;
     QString jsonfile=QCoreApplication::applicationDirPath()+"/camera_config.json";
     bool isEqual=true;
     // 读取 JSON 配置文件
     QFile file(jsonfile);
     if (!file.open(QIODevice::ReadOnly)) {
@@ -501,13 +511,13 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
         return isEqual;
     }
     QJsonObject toJsonObject;
     // 解析 JSON 数据
     QByteArray jsonData = file.readAll();
     file.close();
     QJsonDocument jsonDoc = QJsonDocument::fromJson(jsonData);
     QJsonObject rootObj = jsonDoc.object();
     // 获取 cameraconfigs 对象
     QJsonArray cameraConfigs = rootObj.value("cameraconfigs").toArray();
     bool found = false;
@@ -552,12 +562,12 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
         toJsonObject["iHsIntervalTime"]=gb28181->heartbeat_interval;
         toJsonObject["szConnPass"]=gb28181->password;
         toJsonObject["szDeviceNO"]=gb28181->device_id;
         QVariantList variantList;
         for (int i = 0; i < 64; ++i) {
             variantList.append(QVariant(0));
         }
         QJsonArray levelArray = QJsonArray::fromVariantList(variantList);
         toJsonObject["AlarmLevel"]=levelArray;
         QStringList alarmidStrings;
@@ -570,9 +580,9 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
         for (int i = 0; i < 64; ++i) {
             variantListLevel.append(QVariant(0));
         }
         QJsonArray camreaLevelArray = QJsonArray::fromVariantList(variantListLevel);
         toJsonObject["CamreaLevel"]=camreaLevelArray;
         QStringList camreaidStrings;
         for (int i = 1; i <= 64; ++i) {
@@ -610,7 +620,7 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
             newValue["iHsIntervalTime"]=gb28181->heartbeat_interval;
             newValue["szConnPass"]=gb28181->password;
             newValue["szDeviceNO"]=gb28181->device_id;
         toJsonObject["szCsIP"]=gb28181->sip_ip ;
         toJsonObject["szServerNo"]=gb28181->serial;
         toJsonObject["sCsPort"]=gb28181->sip_port;
@@ -619,14 +629,14 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
         toJsonObject["iHsIntervalTime"]=gb28181->heartbeat_interval;
         toJsonObject["szConnPass"]=gb28181->password;
         toJsonObject["szDeviceNO"]=gb28181->device_id;
         QVariantList variantList;
         for (int i = 0; i < 64; ++i) {
             variantList.append(0);
         }
         QJsonArray levelArray = QJsonArray::fromVariantList(variantList);
         toJsonObject["AlarmLevel"]=levelArray;
         QStringList alarmidStrings;
         for (int i = 1; i <= 64; ++i) {
@@ -675,14 +685,14 @@ bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &work
         QJsonDocument saveDoc(rootObj);
         file.write(saveDoc.toJson());
         file.close();
     }
     return isEqual;
 }
 void MainWindow::iniRecordingToString(QString &recorJson){
     QJsonObject jsonObject;
     // 读取 Mask 数据
     QJsonArray maskArray;
     // 遍历所有掩码
@@ -691,27 +701,27 @@ void MainWindow::iniRecordingToString(QString &recorJson){
         // 读取掩码值。存储为QStringList,就像在ini文件中定义的一样
         QStringList maskValues = qSetting->value(maskKey).toStringList();
         QJsonArray maskSubArray;
         foreach (const QString &value, maskValues) {
             maskSubArray.append(value.trimmed());
         }
         maskArray.append(maskSubArray);
     }
     jsonObject["Mask"] = maskArray;
     // 读取 Packet 数据
     jsonObject["PacketLength"] =qSetting->value("Packet/PacketLength").toInt();
     jsonObject["PreRecord"] = qSetting->value("Packet/PreRecord").toInt();
     jsonObject["RecordMode"] = qSetting->value("Packet/RecordMode").toString();
     jsonObject["Redundancy"] = qSetting->value("Packet/Redundancy").toBool();
     // 读取 TimeSection 数据
     QJsonArray timeArray;
     for (int ts = 1; ts <= 7; ts++) {
         QString tsKey = QString("TimeSection/TimeSection_%1").arg(ts);
         // 读取掩码值。存储为QStringList,就像在ini文件中定义的一样
         QStringList tsValues = qSetting->value(tsKey).toStringList();
         QJsonArray timeSubArray;
@@ -741,9 +751,9 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
     MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
     float carConfidence=qSetting->value("devices/carConfidence").toFloat();
     int image_save=qSetting->value("devices/image_save").toInt();
     CameraHandle * cameraHandle =new CameraHandle(parameter.sDevId,parameter.httpUrl,parameter.sSn,parameter.channel,modelPaths,carConfidence,image_save);
     int sdk_handle=cameraHandle->sdkDevLoginSyn(parameter.sDevId,parameter.nDevPort,parameter.sUserName,parameter.sPassword,10000);
     qDebug()<<"句柄为2:"<<sdk_handle;
     if(sdk_handle<=0){
@@ -752,11 +762,11 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
     }
     initDevConfigSyn(cameraHandle);
     mediaFaceImage->setMap(sdk_handle,cameraHandle);
     cameraHandle->sdkDevSetAlarmListener(sdk_handle,1);
     int synTime=qSetting->value("timer/dev_snap_syn_timer").toInt();
     uint64 face_frequency=qSetting->value("devices/face_frequency").toULongLong();
     cameraHandle->initSdkRealTimeDevSnapSyn(sdk_handle,synTime,face_frequency);
     vides_data::requestCameraInfo camera_info;
     camera_info.sSn=parameter.sSn;
@@ -769,7 +779,7 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
     cameraHandle->initParkingSpaceInfo(areas);
     Common & instace= Common::getInstance();
     QString key =parameter.sDevId + ":" + QString::number(parameter.nDevPort);
     faceDetectionParkingPushs[key]= cameraHandle;
     HttpService httpService(parameter.httpUrl);
@@ -842,15 +852,15 @@ MainWindow::~MainWindow()
     instace.deleteObj(deleteLogFileTimer);
     instace.deleteObj(deleteFrameFileTimer);
     instace.deleteObj(dePermissionSynTimer);
     for(auto iter = faceDetectionParkingPushs.begin(); iter != faceDetectionParkingPushs.end(); ++iter) {
         instace.deleteObj( iter->second);
     }
     // 清空 handleMap
     faceDetectionParkingPushs.clear();
     LogHandler::Get().uninstallMessageHandler();
 }
 void MainWindow::deleteMkvFileTimer(){
@@ -869,7 +879,7 @@ void MainWindow::deleteMkvFileTimer(){
             }
         }
     }
 }
 void MainWindow::deleteLogFile(){
@@ -878,20 +888,20 @@ void MainWindow::deleteLogFile(){
     // 前7天
     QDateTime dateTime1 = now.addDays(-7);
     QDateTime dateTime2;
     QString logPath = logDir.absoluteFilePath(""); // 日志的路径
     QDir dir(logPath);
     QStringList filename ;
     filename << "*.log";//可叠加,可使用通配符筛选
     QFileInfoList fileList = dir.entryInfoList(filename);
     foreach (QFileInfo f, fileList) {
         // "."和".."跳过
         if (f.baseName() == "" || f.baseName()=="today" )
             continue;
         dateTime2 = QDateTime::fromString(f.baseName(), "yyyy-MM-dd");
         if (dateTime2 < dateTime1) { // 只要日志时间小于前7天的时间就删除
             dir.remove(f.absoluteFilePath());
@@ -900,11 +910,11 @@ void MainWindow::deleteLogFile(){
 }
 void MainWindow::initFaceFaceRecognition() {
     qSetting->beginGroup("cloudImageMap");
     QStringList keys = qSetting->childKeys();
     foreach(QString key, keys) {
         QString value = qSetting->value(key).toString();
         cloudImageMap[key]=value;
@@ -912,7 +922,7 @@ void MainWindow::initFaceFaceRecognition() {
     qSetting->endGroup();
     qSetting->beginGroup("localImageMap");
     QStringList lokeys = qSetting->childKeys();
     foreach(QString lk, lokeys) {
         // 获取键对应的值
         QString value = qSetting->value(lk).toString();
......
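For reference, the initFaceFaceRecognition context above follows a simple QSettings pattern: open a group, copy every child key into a map, close the group. In isolation it looks like this (hypothetical function and group names):

// Sketch: read every key of a QSettings group into a std::map (hypothetical group name).
#include <QSettings>
#include <QString>
#include <map>

std::map<QString, QString> readImageMap(QSettings& settings, const QString& group) {
    std::map<QString, QString> result;
    settings.beginGroup(group);                      // e.g. "cloudImageMap"
    const QStringList keys = settings.childKeys();
    for (const QString& key : keys) {
        result[key] = settings.value(key).toString();
    }
    settings.endGroup();                             // always pair beginGroup/endGroup
    return result;
}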