Commit 476b9ed9 by “liusq”

Add destructor cleanup and a shutdown-state flag

parent 784d6dae
......@@ -39,8 +39,10 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch
}
CameraHandle::~CameraHandle() {
stopRequested_=true;
Common & instace= Common::getInstance();
dev_snap_syn_timer->stop();
QThreadPool::globalInstance()->waitForDone();
instace.deleteObj(dev_snap_syn_timer);
instace.deleteObj(loginParam);
instace.deleteObj(sxMediaFaceImageReq);
......@@ -52,7 +54,7 @@ CameraHandle::~CameraHandle() {
instace.deleteObj( iter->second);
}
parkMap.clear();
QThreadPool::globalInstance()->waitForDone();
}
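The ordering in the destructor above is the point of the change: QThreadPool::globalInstance()->waitForDone() now runs before the deleteObj calls, so queued snapshot tasks finish before the members they touch are freed, and stopRequested_ tells tasks that start late to bail out. A minimal sketch of that shutdown order with illustrative names (the header adds stopRequested_ as a plain bool; the std::atomic<bool> below is an assumption, not the repository's type):

    #include <QThreadPool>
    #include <QTimer>
    #include <atomic>

    class SnapWorkerOwner {
    public:
        ~SnapWorkerOwner() {
            stopRequested_.store(true);                     // running tasks see the flag and return early
            snapTimer_->stop();                             // stop scheduling new tasks
            QThreadPool::globalInstance()->waitForDone();   // drain tasks already in the pool
            delete snapTimer_;                              // only now free state the tasks shared
            snapTimer_ = nullptr;
        }

    private:
        std::atomic<bool> stopRequested_{false};
        QTimer* snapTimer_ = new QTimer();
    };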
......@@ -247,7 +249,6 @@ void CameraHandle::initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 fa
dev_snap_syn_timer->start(syn_timer);
}
void CameraHandle::sdkRealTimeDevSnapSyn(int hDevice) {
QThreadPool* threadPool = QThreadPool::globalInstance();
threadPool->setMaxThreadCount(8);
auto taskSyn = std::bind(&CameraHandle::sdkDevSnapSyn, this, hDevice, this->channel);
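The hunk above binds sdkDevSnapSyn into a callable and caps the global pool at eight threads; the actual submission is cut off by the diff. A hedged sketch of one way such a bound task can be handed to QThreadPool (the FunctorTask wrapper is illustrative, not from the repository):

    #include <QThreadPool>
    #include <QRunnable>
    #include <functional>

    // Small QRunnable adapter so an arbitrary std::function can run on the pool.
    class FunctorTask : public QRunnable {
    public:
        explicit FunctorTask(std::function<void()> fn) : fn_(std::move(fn)) {
            setAutoDelete(true);                 // the pool deletes the task after run()
        }
        void run() override { if (fn_) fn_(); }

    private:
        std::function<void()> fn_;
    };

    // Usage, mirroring the std::bind call in the hunk:
    // QThreadPool::globalInstance()->start(
    //     new FunctorTask(std::bind(&CameraHandle::sdkDevSnapSyn, this, hDevice, this->channel)));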
......@@ -325,6 +326,7 @@ void CameraHandle::sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
}
int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
if (stopRequested_) return -1;
if (!semaphore.tryAcquire()) {
qInfo() << "sdkDevSnapSyn:正在执行线程";
return -1;
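The two guards added above make the snapshot callback shutdown-aware and re-entrant-safe: it returns immediately once a stop has been requested, or when a previous invocation still holds the semaphore, dropping the frame instead of queueing behind it. A minimal sketch of the pattern (the QSemaphore member and its count of 1 are assumptions based on this hunk):

    #include <QSemaphore>
    #include <QDebug>

    static QSemaphore semaphore(1);        // one callback in flight at a time (count is an assumption)
    static bool stopRequested_ = false;    // set by the destructor before it waits on the pool

    int callbackSketch() {
        if (stopRequested_) return -1;     // shutdown has begun: drop the frame
        if (!semaphore.tryAcquire()) {     // previous invocation still running
            qInfo() << "callback busy, skipping this frame";
            return -1;                     // non-blocking, so frames are skipped rather than queued
        }
        // ... per-frame work ...
        semaphore.release();               // must run on every exit path (see ScopeSemaphoreExit below)
        return 0;
    }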
......@@ -368,6 +370,7 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
}
void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
if (stopRequested_) return;
if(hDevice<=0){
qInfo() << "相机断线";
return;
......@@ -440,7 +443,7 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
//No plate detected at the moment: treat as a vehicle exit
if(newInfo.getLicensePlate().length()<=0){
HumanDetection &humanDetection=HumanDetection::getInstance();
int car_size = humanDetection.findHuManCar(frame,1,detector);
int car_size = humanDetection.findHuManCar(frame,0x01,detector);
qDebug()<<sSn<<":"<<"当前车形数量:"<<car_size;
if(car_size<=0){
//Vehicle exit
......@@ -491,18 +494,18 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
QByteArray imgs;
this->matToBase64(frame, imgs);
HttpService httpService(httpUrl);
int faSize =-1;
int faSize =0;
if ((algorithmPermissions & 0x01<<1) != 0) {
faSize=humanDetection.findHuManCar(frame,0,detector);
faSize=humanDetection.findHuManCar(frame,0x00,detector);
if(currentFace!=faSize){
if(currentFace!= faSize){
if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){
vides_data::response* resp=httpService.httpPostFacePopulation(imgs,faSize,sSn,currentTime);
if (resp->code!= 0) {
qInfo()<<"人数变化推送信息推送失败";
}
instace.deleteObj(resp);
currentFace=faSize;
currentFace = faSize;
}
}
}
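The (algorithmPermissions & 0x01<<1) != 0 test above is a bit-flag check: bit 1 appears to gate the people-count push; other bits are not visible in this hunk. A small illustrative sketch of that convention (the constant name is an assumption, not a repository identifier):

    // Bit 1 of algorithmPermissions enables people counting, as checked in updateImage.
    constexpr int kPermPeopleCount = 0x01 << 1;

    void sketchPermissionCheck(int algorithmPermissions) {
        if ((algorithmPermissions & kPermPeopleCount) != 0) {
            // run findHuManCar(frame, 0x00, detector) and push only when the count changed
        }
    }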
......@@ -511,7 +514,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
if(uniforms>0 ){
if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){
httpService.setHttpUrl(httpUrl);
vides_data::response* resp=httpService.httpPostUniforms(imgs,faSize,sSn,currentTime);
vides_data::response* resp=httpService.httpPostUniforms(imgs,uniforms,sSn,currentTime);
if (resp->code!= 0) {
qInfo()<<"推送未穿工服人数失败";
}
......
......@@ -126,6 +126,7 @@ private :
int hDevice;
int channel;
QString httpUrl;
bool stopRequested_ = false;
SXSDKLoginParam *loginParam;
SXMediaFaceImageReq *sxMediaFaceImageReq;
......
......@@ -33,6 +33,8 @@ public:
cv::Mat loadImage(const QString &path);
cv::Mat loadImageFromByteStream(const QString& filePath);
void doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face);
void initSourceImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence);
......
......@@ -2,6 +2,9 @@
#include <QImage>
#include <QThread>
#include <iostream>
#define cimg_display 0
#include "CImg.h"
......@@ -20,31 +23,6 @@ FaceReconition::~FaceReconition(){
FaceReconition* FaceReconition::instance = nullptr;
//cv::Mat FaceReconition::loadImage(const QString &path) {
// // Try to load the image directly with OpenCV
// std::string stdPath = path.toStdString(); // convert the path to std::string
// cv::Mat image = cv::imread(stdPath, cv::IMREAD_COLOR); // try to load the image
// if (!image.empty()) {
// qDebug() << "Image loaded successfully with OpenCV.";
// return image;
// }
// // OpenCV failed to load the image, fall back to QImage
// qDebug() << "OpenCV failed to load the image, trying QImage conversion.";
// QImage qimg(path);
// if (qimg.isNull()) {
// qDebug() << "QImage could not load the image either; check the file path or whether the file is corrupted.";
// return cv::Mat(); // return an empty cv::Mat
// }
// // Convert the QImage to RGB888
// QImage converted = qimg.convertToFormat(QImage::Format_RGB888);
// cv::Mat mat(converted.height(), converted.width(), CV_8UC3, const_cast<uchar*>(converted.bits()), static_cast<size_t>(converted.bytesPerLine()));
// // Return directly without color conversion
// return mat;
//}
cv::Mat FaceReconition::loadImage(const QString &path) {
// Try to load the image directly with OpenCV
std::string stdPath = path.toStdString();
......@@ -53,7 +31,7 @@ cv::Mat FaceReconition::loadImage(const QString &path) {
qDebug() << "图像以OpenCV成功加载。";
return image;
}
return loadImageFromByteStream(path);
}
......@@ -69,7 +47,7 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
QString bPath = QCoreApplication::applicationDirPath() + "/model_zip/T1_5";
#else
#error "不支持的架构"
#endif
QByteArray && bypath = bPath.toUtf8();
char* spath = bypath.data();
......@@ -87,7 +65,7 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
customIds.clear();
int i = 0;
qInfo()<< "加载图像size: "<<maps.size();
for (auto it = maps.begin(); it != maps.end(); ++it,++i) {
const QString& key = it->first;
QString& value = it->second;
......@@ -102,7 +80,7 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
imageData.width = image.cols;
imageData.rotation = VIEW_ROTATION_0;
imageData.format = FORMAT_BGR;
HImageHandle imageSteamHandle;
ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
this->configConfidence=confidence;
......@@ -111,25 +89,25 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
HF_ReleaseImageStream(imageSteamHandle); // release the image stream
return;
}
HF_MultipleFaceData multipleFaceData = {0};
HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
if (multipleFaceData.detectedNum <= 0) {
qInfo() << QString("initSourceImageMap:未检测到人脸: %1").arg(key);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
HF_FaceFeature feature = {0};
ret = HF_FaceFeatureExtract(ctxHandle, imageSteamHandle, multipleFaceData.tokens[0], &feature);
if (ret != HSUCCEED) {
qInfo() << QString("特征提取出错: %1").arg(ret);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
char* tagName = new char[key.size() + 1];
std::strcpy(tagName, key.toStdString().c_str());
HF_FaceFeatureIdentity identity = {0};
......@@ -137,16 +115,16 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numbe
identity.customId = i;
customIds.push_back( identity.customId);
identity.tag = tagName;
ret = HF_FeaturesGroupInsertFeature(ctxHandle, identity);
if (ret != HSUCCEED) {
qInfo() << QString("插入失败: %1").arg(ret);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
delete[] tagName;
ret = HF_ReleaseImageStream(imageSteamHandle);
if (ret == HSUCCEED) {
imageSteamHandle = nullptr;
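One detail in the insertion step above: tagName is allocated with new[] but only freed on the success path, so the early return after a failed HF_FeaturesGroupInsertFeature leaks it. A hedged alternative that avoids manual ownership, assuming the SDK copies the tag on insert (if it retains the pointer, the backing string must outlive the feature group):

    // Sketch only: back the tag with std::string so no explicit delete[] is needed.
    std::string tagStorage = key.toStdString();
    HF_FaceFeatureIdentity identity = {0};
    identity.customId = i;
    identity.tag = const_cast<char*>(tagStorage.c_str());
    ret = HF_FeaturesGroupInsertFeature(ctxHandle, identity);
    if (ret != HSUCCEED) {
        qInfo() << QString("Failed to insert into the feature group: %1").arg(ret);
        HF_ReleaseImageStream(imageSteamHandle);
        return;                              // nothing to free by hand on this path
    }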
......@@ -165,9 +143,11 @@ int FaceReconition::featureRemove(){
}
}
}
cv::Mat FaceReconition::loadImageFromByteStream(const QString& filePath) {
try {
try {
// Read the JPEG image with CImg
QByteArray bPath =filePath.toUtf8();
const char* ctr=bPath.data();
......@@ -176,34 +156,29 @@ cv::Mat FaceReconition::loadImageFromByteStream(const QString& filePath) {
// Convert the CImg object to an OpenCV Mat
int width = cimg_image.width();
int height = cimg_image.height();
int channels = cimg_image.spectrum(); // number of image channels
cv::Mat mat(height, width, CV_8UC3);
// Create a corresponding empty Mat
cv::Mat opencv_image(height, width, channels == 1 ? CV_8UC1 : CV_8UC3);
// Copy the image data into the Mat
cimg_forXY(cimg_image, x, y) {
if (channels == 1) {
opencv_image.at<unsigned char>(y, x) = cimg_image(x, y, 0, 0);
} else {
// Note: OpenCV uses BGR by default, not RGB
cv::Vec3b& opencv_pixel = opencv_image.at<cv::Vec3b>(y, x);
opencv_pixel[2] = cimg_image(x, y, 0, 0); // Red
opencv_pixel[1] = cimg_image(x, y, 0, 1); // Green
opencv_pixel[0] = cimg_image(x, y, 0, 2); // Blue
}
// Note: OpenCV stores channels in BGR order by default
// CImg pixels are accessed as (x, y, z, c), where c is the color channel
mat.at<cv::Vec3b>(y, x)[2] = cimg_image(x, y, 0, 0); // R
mat.at<cv::Vec3b>(y, x)[1] = cimg_image(x, y, 0, 1); // G
mat.at<cv::Vec3b>(y, x)[0] = cimg_image(x, y, 0, 2); // B
}
return opencv_image;
return mat;
} catch (const CImgException& e) {
qDebug() << "CImg Error: " << e.what();
return cv::Mat();
} catch (const cv::Exception& e) {
qDebug() << "OpenCV Error: " << e.what();
return cv::Mat(); ;
}
return cv::Mat();
}
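As the fall-through above shows, loadImage now tries cv::imread first and only falls back to the CImg byte-stream path; both failure modes end in an empty cv::Mat. A hypothetical call site (the recon object name and path are illustrative):

    cv::Mat img = recon.loadImage("/path/to/face.jpg");
    if (img.empty()) {
        qDebug() << "image could not be decoded by either OpenCV or CImg";
        return;
    }
    // img is 8-bit, 3-channel BGR here, matching the FORMAT_BGR used downstream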
void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
//QReadLocker locker(&rwLock);
HResult ret;
......@@ -214,7 +189,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
imageData.width = source.cols;
imageData.rotation = VIEW_ROTATION_0;
imageData.format = FORMAT_BGR;
HImageHandle imageSteamHandle;
ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
if (ret != HSUCCEED) {
......@@ -223,12 +198,12 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
}
HF_MultipleFaceData multipleFaceData = {0};
HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
if (multipleFaceData.detectedNum <= 0) {
qDebug()<<QString("search 未检测到人脸");
return ;
}
std::vector<std::vector<float>> features;
// For the search target, the copy-style API is recommended for obtaining the feature vector
HInt32 featureNum;
......@@ -257,7 +232,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
qInfo()<<QString("搜索失败: %1").arg(ret);
return ;
}
qDebug()<<QString("搜索置信度: %1").arg(confidence);
qDebug()<<QString("匹配到的tag: %1").arg(searchIdentity.tag);
qDebug()<<QString("匹配到的customId: %1").arg(searchIdentity.customId);
......@@ -272,7 +247,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
newface.height=multipleFaceData.rects[rect].height;
faces.push_back(newface);
}
rect++;
}
ret = HF_ReleaseImageStream(imageSteamHandle);
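Each match above is pushed into the caller's list together with its bounding rectangle. A hypothetical caller (faceRecognition stands for however the singleton instance is obtained, and frameBgr for a BGR cv::Mat frame; neither name is from the repository):

    std::list<vides_data::faceRecognitionResult> results;
    faceRecognition.doesItExistEmployee(frameBgr, results);
    qDebug() << "matched faces:" << results.size();
    for (const auto& face : results) {
        qDebug() << "match rect height:" << face.height;   // height is one of the fields filled per match above
    }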
......
......@@ -3,6 +3,7 @@
vides_data::responseStsCredentials HttpService::stsCredentials;
HttpService::HttpService() {
}
HttpService::HttpService(QString httpUrl) {
this->httpUrl=httpUrl;
......
......@@ -15,7 +15,7 @@ int HumanDetection::findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector
// Run object detection on one frame
TCV_HumanDetectorProcessFrame(detector, stream);
int num=-1;
int num=0;
if(res==0x00 || res==0x02){
num= TCV_HumanDetectorGetNumOfHuman(detector);
if (num > 0 && res==0x02) {
......@@ -32,11 +32,11 @@ int HumanDetection::findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector
}
num=num_uniforms;
}
qDebug() << (res == 0 ? "Number of people detected:" : "Number of people with uniform == 0 detected:") << num;
qDebug() << (res == 0 ? "findHuManCar 检测到的人数:" : "findHuManCar 未穿工服的人数:") << num;
}else if (res==0x01) {
num=TCV_HumanDetectorGetNumOfCar(detector);
qDebug() << "Number of cars detected:" << num;
qDebug() << "findHuManCar 检测到的汽车数量:" << num;
}else {
qDebug() << "参数错误";
......
#include "ScopeSemaphoreExit.h"
ScopeSemaphoreExit::ScopeSemaphoreExit(std::function<void()> onExit)
: onExit_(onExit) {}
ScopeSemaphoreExit::~ScopeSemaphoreExit() {
if (onExit_) onExit_();
}
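ScopeSemaphoreExit above is a small RAII exit guard: whatever callable it is given runs when the guard goes out of scope, on every return path. A typical use, pairing it with the QSemaphore guard seen in callbackFunction (a sketch, not the repository's exact code):

    #include <QSemaphore>
    #include "ScopeSemaphoreExit.h"

    static QSemaphore semaphore(1);

    void guardedWork() {
        if (!semaphore.tryAcquire())
            return;                                                   // someone else is already working
        ScopeSemaphoreExit releaser([]() { semaphore.release(); });   // runs on every exit below

        // ... work that may return early or throw ...
    }   // releaser's destructor releases the semaphore here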
......@@ -272,23 +272,63 @@ inline bool pingAddress(const QString &address) {
return output.contains("1 packets transmitted, 1 received");
}
inline int GetCpuIdByAsm_arm(char* cpu_id)
{
FILE *fp = fopen("/proc/cpuinfo", "r");
if(nullptr == fp)
{
qDebug()<<"failed to open cpuinfo";
return -1;
}
char cpuSerial[100] = {0};
while(!feof(fp))
{
memset(cpuSerial, 0, sizeof(cpuSerial));
fgets(cpuSerial, sizeof(cpuSerial) - 1, fp); // leave out \n
char* pch = strstr(cpuSerial,"Serial");
if (pch)
{
char* pch2 = strchr(cpuSerial, ':');
if (pch2)
{
memmove(cpu_id, pch2 + 2, strlen(cpuSerial));
break;
}
else
{
fclose(fp);
return -1;
}
}
}
fclose(fp);
return 0;
}
// QProcess-based variant: shells out and greps the Serial line
inline QString getCpuSerialNumber() {
QProcess process;
// Start a process running the shell command
process.start("sh", QStringList() << "-c" << "cat /proc/cpuinfo | grep Serial");
// Wait for the command to finish
process.waitForFinished();
// Read the command's standard output
QString output = process.readAllStandardOutput();
QString serialNumber;
if (!output.isEmpty()) {
// The output is already limited to the Serial line, so split and extract directly
serialNumber = output.split(":").last().trimmed();
}
return serialNumber;
}
// QFile-based variant: reads /proc/cpuinfo directly
inline QString getCpuSerialNumber() {
QFile file("/proc/cpuinfo");
if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) {
qDebug() << "Failed to open /proc/cpuinfo";
return QString();
}
QTextStream in(&file);
QString line;
while (!in.atEnd()) {
line = in.readLine();
if (line.startsWith("Serial")) {
QStringList parts = line.split(":");
if (parts.size() > 1) {
QString serial = parts.at(1).trimmed();
return serial;
}
}
}
return QString();
}
}
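Both getCpuSerialNumber variants above return an empty string when no Serial line is present, which is the usual case on x86 hosts: /proc/cpuinfo typically only carries a Serial field on ARM SoCs like the target device. A hypothetical call site:

    QString serial = getCpuSerialNumber();
    if (serial.isEmpty()) {
        qWarning() << "CPU serial not found; /proc/cpuinfo has no Serial line on this platform";
    } else {
        qInfo() << "device serial:" << serial;
    }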
......
......@@ -11,46 +11,46 @@ TEMPLATE = app
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
DEFINES += APP_VERSION=\\\"1.0.0\\\"
#QMAKE_LIBDIR += /usr/local/lib
#INCLUDEPATH+=/usr/local/include/opencv4
#INCLUDEPATH+=/usr/local/include/hyperface
#INCLUDEPATH+=/usr/local/include/hyper
#INCLUDEPATH+=/usr/local/include/XNetSDK
#INCLUDEPATH+=/usr/local/include/human
#INCLUDEPATH+=/usr/local/include/CImg
unix:contains(QMAKE_HOST.arch, x86_64) {
QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
}
unix:contains(QMAKE_HOST.arch, arm) {
QMAKE_LIBDIR += /usr/local/lib
}
# Select library and include paths based on the build architecture
unix: {
# x86 architecture
contains(QMAKE_HOST.arch, x86_64) {
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/CImg
}
# ARM architecture
contains(QMAKE_HOST.arch, arm) {
INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface
INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK
INCLUDEPATH+=/usr/local/include/human
}
}
DEFINES += APP_VERSION=\\\"1.0.1\\\"
QMAKE_LIBDIR += /usr/local/lib
INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface
INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK
INCLUDEPATH+=/usr/local/include/human
INCLUDEPATH+=/usr/local/include/CImg
#unix:contains(QMAKE_HOST.arch, x86_64) {
# QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
#}
#unix:contains(QMAKE_HOST.arch, arm) {
# QMAKE_LIBDIR += /usr/local/lib
#}
## Select library and include paths based on the build architecture
#unix: {
# # x86 architecture
# contains(QMAKE_HOST.arch, x86_64) {
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/CImg
# }
# # ARM architecture
# contains(QMAKE_HOST.arch, arm) {
# INCLUDEPATH+=/usr/local/include/opencv4
# INCLUDEPATH+=/usr/local/include/hyperface
# INCLUDEPATH+=/usr/local/include/hyper
# INCLUDEPATH+=/usr/local/include/XNetSDK
# INCLUDEPATH+=/usr/local/include/human
# }
#}
# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
......@@ -76,7 +76,7 @@ LIBS += -lopencv_core \
-lopencv_objdetect \
-lsohuman \
# -lssl \
# -lcrypto \
# -lcrypto \ sudo apt-get install libjpeg-dev libpng-dev
-lc \
-lXNetSDK
#-lz
......