Commit 784d6dae by “liusq”

新增算法权限和推送设备版本,ip

parent 264b6d6a
......@@ -18,8 +18,8 @@ CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &ch
image_save(imageSave),
semaphore(1) {
connect(this, SIGNAL(afterDownloadFile(int,int,QString)), this, SLOT(pushRecordToCloud(int,int,QString)),Qt::QueuedConnection);
detector = TCV_CreateHumanDetector();
// 设置检测得分阈值 默认0.5
detector = TCV_CreateHumanDetector(1);
TCV_HumanDetectorSetHumanThreshold(detector,0.5f);
TCV_HumanDetectorSetCarThreshold(detector,0.2f);
......@@ -414,10 +414,6 @@ void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
updateImage(image, currentTime);
}
// Store the wait timeout (milliseconds) used by this camera handle.
// NOTE(review): callers appear to load this from the "timer/semaphore_time"
// setting — presumably it bounds the semaphore acquire; confirm at call sites.
void CameraHandle::setTimeoutMs(int timeoutMs){
this->timeoutMs=timeoutMs;
}
void CameraHandle::matToBase64(const cv::Mat &image, QByteArray &base64Data) {
std::vector<unsigned char> buffer;
std::vector<int> params{cv::IMWRITE_JPEG_QUALITY, 90};
......@@ -493,10 +489,12 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
printf("updateImage retryCount: %d \n", ++ii);
//faceRecognition.search(frame,imageHandleList,names);
QByteArray imgs;
int faSize=humanDetection.findHuManCar(frame,0,detector);
this->matToBase64(frame, imgs);
HttpService httpService(httpUrl);
int faSize =-1;
if ((algorithmPermissions & 0x01<<1) != 0) {
faSize=humanDetection.findHuManCar(frame,0,detector);
if(currentFace!=faSize){
if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){
vides_data::response* resp=httpService.httpPostFacePopulation(imgs,faSize,sSn,currentTime);
......@@ -507,7 +505,22 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
currentFace=faSize;
}
}
if(faSize>0){
}
if ((algorithmPermissions & 0x01<<2) != 0) {
int uniforms=humanDetection.findHuManCar(frame,0x02,detector);
if(uniforms>0 ){
if(faceCount.load(std::memory_order_relaxed)%face_frequency==0){
httpService.setHttpUrl(httpUrl);
vides_data::response* resp=httpService.httpPostUniforms(imgs,faSize,sSn,currentTime);
if (resp->code!= 0) {
qInfo()<<"推送未穿工服人数失败";
}
instace.deleteObj(resp);
}
}
}
if(faSize>0 ){
qDebug() << "faceRecognition.doesItExistEmployee Current thread ID: " << QThread::currentThreadId();
std::list<vides_data::faceRecognitionResult>faces;
faceRecognition.doesItExistEmployee(frame,faces);
......@@ -539,6 +552,10 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
}
}
}
//关闭车牌识别
if ((algorithmPermissions & 0x01) == 0) {
return ;
}
QString lpNumber;
vides_data::requestLicensePlate plate;
plate.sn=sSn;
......@@ -710,7 +727,23 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
}
}
}
// Report this camera's address as recorded in the login parameters.
// The value is whatever sDevId held at login (an IP or host identifier);
// the result is written into the caller-supplied out-parameter.
void CameraHandle::findIp(QString &ip){
ip=QString::fromStdString(loginParam->sDevId);
}
// Query the camera's SystemInfo configuration block synchronously (4s timeout)
// and extract the firmware (software) version string on success.
// On failure the out-parameter is left untouched, so callers see an empty
// QString unless they pre-filled it.
void CameraHandle::findFirmwareVersion(QString &firmwareVersion){
    char szOutBuffer[1024] = { 0 };
    int nLen = sizeof(szOutBuffer); // fixed: stray double semicolon removed
    int nResult = XSDK_DevGetSysConfigSyn(hDevice, JK_SystemInfo, szOutBuffer, &nLen, 4000, JK_SystemInfo_MsgId);
    if (nResult >= 0)
    {
        XSDK_CFG::SystemInfo cfg;
        cfg.Parse(szOutBuffer);
        const char* SoftWareVersion = cfg.SoftWareVersion.ToString();
        // fromUtf8 avoids the needless std::string temporary that
        // fromStdString(const char*) would construct.
        firmwareVersion = QString::fromUtf8(SoftWareVersion);
    }
}
void CameraHandle::pushRecordToCloud(int id, int recognitionType, QString ossUrl){
HttpService httpService(ossUrl);
Common & instace= Common::getInstance();
......@@ -826,6 +859,20 @@ void CameraHandle::sdkDevSpvMn(const char *spvMn){
qInfo() << "sdkDevSpvMn 28181->修改失败"<<res;
}
}
// Issue a "Reboot" operation to the camera via the OPMachine config channel,
// then drop this (now offline) handle from the main window's registry.
void CameraHandle::deviceReboot(){
int nRet=0;
XSDK_CFG::OPMachine cfg;
// The SDK encodes the reboot command as an "Action" field in the config blob.
cfg.Action.SetValue("Reboot");
const char* pCfg = cfg.ToString();
// Synchronous set with a 5s timeout; negative return means the SDK call failed.
nRet = XSDK_DevSetSysConfig(hDevice, JK_OPMachine, pCfg, strlen(pCfg), 1, 5000, EXCMD_SYSMANAGER_REQ);
if(nRet<0){
// Log message reads "camera reboot failed" (kept verbatim).
qInfo() << sSn<<"重启相机失败"<<nRet;
return ;
}
// Reboot accepted: unregister the handle keyed by the login-time address/port.
// NOTE(review): findIp() uses fromStdString for the same field — presumably
// equivalent here; confirm sDevId encoding.
QString ip=QString::fromUtf8(loginParam->sDevId);
MainWindow::sp_this->clearOfflineCameraHandle(ip,loginParam->nDevPort);}
bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2) {
QPolygonF realPolygon;
......@@ -947,6 +994,12 @@ int CameraHandle::determineArea(ParkingSpaceInfo &prakArea){
return areaOfMaxIntersection;
}
// Record the cloud-supplied algorithm permission bitmask for this handle.
// The value is only written when it differs from the current one.
void CameraHandle::initAlgorithmPermissions(__uint8_t algorithm){
    if (algorithmPermissions != algorithm) {
        algorithmPermissions = algorithm;
    }
}
void CameraHandle::initParkingSpaceInfo(const std::list<vides_data::responseArea> &areas){
int index = 1;
for (auto area = areas.begin(); area != areas.end(); ++area) {
......
......@@ -11,6 +11,8 @@
#include "Json_Header/System_TimeZone.h"
#include "Json_Header/RecordCfg.h"
#include "Json_Header/NetWork_SPVMN.h"
#include "Json_Header/SystemInfo.h"
#include "Json_Header/OPMachine.h"
#include "mainwindow.h"
#include "ParkingSpaceInfo.h"
#include "so_human_sdk.h"
......@@ -73,19 +75,24 @@ public:
void sdkEncodeCfg(const char *enCode);
//28181更新
void sdkDevSpvMn(const char* spvMn);
//重启设备
void deviceReboot();
//获取固件版本
void findFirmwareVersion(QString &firmwareVersion);
//获取ip
void findIp(QString &ip);
void sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
QString startTimer,QString endTime);
void setTimeoutMs(int timeoutMs);
QString getSSn();
int getMediaHandle();
void setMediaHandle(int mediaHandle);
void setCurrentFace(int currentFace);
void initAlgorithmPermissions(__uint8_t algorithm);
void initParkingSpaceInfo(const std::list<vides_data::responseArea>&areas);
bool compareLists(const std::list<vides_data::responseArea>& newAreas);
......@@ -93,6 +100,7 @@ public:
void updateParkMapAndParkingSpaceInfos(const std::list<vides_data::responseArea>&newAreas);
std::map<int, vides_data::responseRecognitionData>&getVideoCurrentData();
std::map<QString, QString>&getCurrentData();
// 检查点是否在多边形内
......@@ -150,13 +158,13 @@ private :
QSemaphore semaphore;
int timeoutMs;
int image_save;
std::atomic<uint64> faceCount;
uint64 face_frequency;
__uint8_t algorithmPermissions;
};
#endif // CAMERAHANDLE_H
#ifndef FACERECOGNITION_H
#define FACERECOGNITION_H
#include "hyperface.h"
#include <opencv2/opencv.hpp>
#include<QCoreApplication>
#include "herror.h"
#include "LogHandle.h"
#include "VidesData.h"
#include <QReadWriteLock>
#include <opencv2/opencv.hpp>
#include<QCoreApplication>
class FaceReconition
{
......@@ -19,8 +20,6 @@ private:
std::vector<int32_t>customIds;
QReadWriteLock rwLock;
FaceReconition();
~FaceReconition();
......@@ -32,10 +31,11 @@ public:
return instance;
}
cv::Mat loadImage(const QString &path);
cv::Mat loadImageFromByteStream(const QString& filePath);
void doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face);
void initSourceImageMap(std::map<QString,QString>&maps,float confidence);
void initSourceImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence);
int featureRemove();
......
......@@ -2,6 +2,11 @@
#include <QImage>
#include <QThread>
#define cimg_display 0
#include "CImg.h"
using namespace cimg_library;
FaceReconition::FaceReconition() {}
......@@ -15,33 +20,44 @@ FaceReconition::~FaceReconition(){
FaceReconition* FaceReconition::instance = nullptr;
//cv::Mat FaceReconition::loadImage(const QString &path) {
// // 尝试使用OpenCV直接加载图像
// std::string stdPath = path.toStdString(); // 将路径转换为std::string
// cv::Mat image = cv::imread(stdPath, cv::IMREAD_COLOR); // 尝试加载图像
// if (!image.empty()) {
// qDebug() << "图像以OpenCV成功加载。";
// return image;
// }
// // 使用OpenCV加载失败,尝试使用QImage
// qDebug() << "使用OpenCV加载图像失败,尝试QImage转换。";
// QImage qimg(path);
// if (qimg.isNull()) {
// qDebug() << "QImage也无法加载图像,检查文件路径或文件损坏。";
// return cv::Mat(); // 返回空的cv::Mat对象
// }
// // 转换QImage格式为RGB888
// QImage converted = qimg.convertToFormat(QImage::Format_RGB888);
// cv::Mat mat(converted.height(), converted.width(), CV_8UC3, const_cast<uchar*>(converted.bits()), static_cast<size_t>(converted.bytesPerLine()));
// // 不进行颜色转换,直接返回
// return mat;
//}
// Load an image from disk for face registration.
// Tries OpenCV's imread first; if that fails (unsupported path/encoding),
// falls back to the CImg-based byte-stream loader.
// Fix: the block contained merged old/new diff lines — stdPath/image were
// declared twice and a dead QImage fallback remained — which cannot compile;
// this is the intended post-change version.
cv::Mat FaceReconition::loadImage(const QString &path) {
    std::string stdPath = path.toStdString();
    cv::Mat image = cv::imread(stdPath, cv::IMREAD_COLOR);
    if (!image.empty()) {
        qDebug() << "图像以OpenCV成功加载。";
        return image;
    }
    return loadImageFromByteStream(path);
}
void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,float confidence){
void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence){
//QWriteLocker locker(&rwLock);
featureRemove();
HResult ret;
......@@ -62,7 +78,7 @@ void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,float con
HF_DetectMode detMode = HF_DETECT_MODE_IMAGE; // 选择图像模式 即总是检测
if(ctxHandle==nullptr){
// 创建ctx
ret = HF_CreateFaceContextFromResourceFileOptional(path, option, detMode, 5, &ctxHandle);
ret = HF_CreateFaceContextFromResourceFileOptional(path, option, detMode, numberFaces, &ctxHandle);
if (ret != HSUCCEED) {
qInfo() << QString("Create ctx error: %1").arg(ret);
return;
......@@ -149,6 +165,45 @@ int FaceReconition::featureRemove(){
}
}
}
// Decode an image file with CImg and convert it into an interleaved cv::Mat.
// Used as a fallback when cv::imread cannot handle the file.
// Returns an empty Mat on any CImg or OpenCV failure.
// Fixes: stray double semicolons in the catch returns and an unreachable
// trailing return removed.
cv::Mat FaceReconition::loadImageFromByteStream(const QString& filePath) {
    try {
        // CImg reads directly from the (UTF-8) file path.
        QByteArray bPath = filePath.toUtf8();
        const char* ctr = bPath.data();
        CImg<unsigned char> cimg_image(ctr);

        int width = cimg_image.width();
        int height = cimg_image.height();
        int channels = cimg_image.spectrum(); // number of color planes
        // NOTE(review): spectra other than 1 or 3 (e.g. RGBA) are mapped to
        // CV_8UC3 and only the first three planes are copied — confirm inputs.
        cv::Mat opencv_image(height, width, channels == 1 ? CV_8UC1 : CV_8UC3);

        // CImg stores planar data; copy pixel-by-pixel into the Mat,
        // swapping RGB -> BGR because OpenCV expects BGR order.
        cimg_forXY(cimg_image, x, y) {
            if (channels == 1) {
                opencv_image.at<unsigned char>(y, x) = cimg_image(x, y, 0, 0);
            } else {
                cv::Vec3b& opencv_pixel = opencv_image.at<cv::Vec3b>(y, x);
                opencv_pixel[2] = cimg_image(x, y, 0, 0); // Red
                opencv_pixel[1] = cimg_image(x, y, 0, 1); // Green
                opencv_pixel[0] = cimg_image(x, y, 0, 2); // Blue
            }
        }
        return opencv_image;
    } catch (const CImgException& e) {
        qDebug() << "CImg Error: " << e.what();
        return cv::Mat();
    } catch (const cv::Exception& e) {
        qDebug() << "OpenCV Error: " << e.what();
        return cv::Mat();
    }
}
void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
//QReadLocker locker(&rwLock);
HResult ret;
......@@ -207,34 +262,6 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
qDebug()<<QString("匹配到的tag: %1").arg(searchIdentity.tag);
qDebug()<<QString("匹配到的customId: %1").arg(searchIdentity.customId);
// Face Pipeline
ret = HF_MultipleFacePipelineProcess(ctxHandle, imageSteamHandle, &multipleFaceData, parameter);
if (ret != HSUCCEED) {
//printf("pipeline执行失败: %ld", ret);
qInfo()<<QString("pipeline执行失败: %1").arg(ret);
return ;
}
HF_RGBLivenessConfidence livenessConfidence = {0};
ret = HF_GetRGBLivenessConfidence(ctxHandle, &livenessConfidence);
if (ret != HSUCCEED) {
qInfo()<<QString("获取活体数据失败1");
return ;
}
//printf("活体置信度: %f", livenessConfidence.confidence[0]);
qDebug()<<QString("活体置信度====>:%1").arg(livenessConfidence.confidence[0],0,'Q',4);
HF_FaceMaskConfidence maskConfidence = {0};
ret = HF_GetFaceMaskConfidence(ctxHandle, &maskConfidence);
if (ret != HSUCCEED) {
qInfo()<<QString("获口罩数据失败");
return ;
}
HInt32 faceNum;
ret = HF_FeatureGroupGetCount(ctxHandle, &faceNum);
if (ret != HSUCCEED) {
// printf("获取失败");
qInfo()<<QString("获取失败");
return ;
}
//printf("人脸特征数量: %d", faceNum);
if (confidence > configConfidence) {
vides_data::faceRecognitionResult newface;
......
......@@ -15,35 +15,51 @@ HttpService::~HttpService() {
// POST the box's status (sn, type, state, ip, firmware version) plus the list
// of attached cameras to /api/v1.0/device/ping.
// Returns a heap-allocated response the caller must delete; code 2 with the
// transport error text signals an HTTP failure.
// Fix: the block contained merged old/new diff lines — json fields inserted
// twice and jsonDoc/bytearr/resp each defined twice (redefinition errors);
// this is the intended post-change version, keeping errorString() reporting.
vides_data::response* HttpService::httpPostDeviceStatus(vides_data::requestDeviceStatus & deviceStatus) {
    httpUrl.append("/api/v1.0/device/ping");
    // Build the top-level JSON payload.
    QJsonObject json;
    json.insert("sn", deviceStatus.sSn);
    json.insert("type", deviceStatus.type);
    json.insert("state", deviceStatus.status);
    json.insert("ip_addr", deviceStatus.ip_addr);
    json.insert("firmware_version", deviceStatus.firmware_version);

    // Per-camera info array.
    QJsonArray cameraArray;
    for (const auto& cameraInfo : deviceStatus.camera_info_list) {
        QJsonObject cameraObject;
        cameraObject.insert("sn", cameraInfo.sSn);
        cameraObject.insert("ip_addr", cameraInfo.ip_addr);
        cameraObject.insert("firmware_version", cameraInfo.firmware_version);
        cameraArray.append(cameraObject);
    }
    json.insert("camera_info_list", cameraArray);

    QJsonDocument jsonDoc(json);
    QByteArray bytearr = jsonDoc.toJson(QJsonDocument::Compact);

    vides_data::response *resp = new vides_data::response();
    QNetworkRequest request;
    request.setUrl(QUrl(httpUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    // Serialize access to the shared HTTP client.
    QMutexLocker locker(&m_httpClientMutex);
    if (m_httpClient.post(request, bytearr)) {
        QByteArray && byte = m_httpClient.text().toUtf8();
        QJsonDocument docujson = QJsonDocument::fromJson(byte.data());
        QJsonObject maps = docujson.object();
        QVariantMap map = std::move(maps.toVariantMap());
        resp->code = map["code"].toInt();
        resp->msg = map["message"].toString();
    } else {
        qDebug() << "httpPostDeviceStatus" << m_httpClient.errorCode();
        resp->code = 2;
        resp->msg = m_httpClient.errorString();
    }
    return resp;
}
vides_data::response* HttpService::httpPostRecord(int id,int recongnition_type,QString sn,QString videw_addr){
httpUrl.append("/api/v1.0/recongnition/record");
......@@ -108,7 +124,8 @@ vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_d
HttpService::stsCredentials.endpoint=responseData.sts_credentials.endpoint = stsCredentialsObj["endpoint"].toString();
HttpService::stsCredentials.expiration=responseData.sts_credentials.expiration = stsCredentialsObj["expiration"].toString();
HttpService::stsCredentials.security_token=responseData.sts_credentials.security_token = stsCredentialsObj["security_token"].toString();
int algorithm= dataObj["algorithm"].toInt();
responseData.algorithm=algorithm;
QJsonArray dataArray = dataObj["list"].toArray();
for (const QJsonValue& value : dataArray) {
vides_data::responseDeviceStatus status;
......@@ -116,6 +133,7 @@ vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_d
status.sSn = deviceObject["sn"].toString();
status.type = static_cast<int8_t>(deviceObject["type"].toInt());
status.is_reboot=deviceObject["is_reboot"].toBool();
status.merchant_id = static_cast<int8_t>(deviceObject["merchant_id"].toInt());
// 处理"areas"数组
......@@ -268,6 +286,39 @@ vides_data::response* HttpService::httpFindFaceReconition(QString &serialNumber,
}
return resp;
}
// POST a uniform-detection snapshot (image, count, sn, timestamp) to
// /api/v1.0/recongnition/uniform. Returns a heap-allocated response the
// caller must delete; code 2 / OPERATION_FAILED signals a transport failure.
vides_data::response *HttpService::httpPostUniforms(QByteArray &img,int &number,QString sn,qint64 time){
    httpUrl.append("/api/v1.0/recongnition/uniform");

    // Assemble the request payload.
    QJsonObject payload;
    payload.insert("img", QJsonValue::fromVariant(img));
    payload.insert("sn", sn);
    payload.insert("number", number);
    payload.insert("time", QJsonValue::fromVariant(time));
    QJsonDocument doc;
    doc.setObject(payload);
    QByteArray body = doc.toJson(QJsonDocument::Compact);

    vides_data::response *resp = new vides_data::response();
    QNetworkRequest request;
    request.setUrl(QUrl(httpUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);

    // Serialize access to the shared HTTP client.
    QMutexLocker locker(&m_httpClientMutex);
    if (!m_httpClient.post(request, body)) {
        qDebug() << m_httpClient.errorCode();
        qDebug() << "httpPostUniforms" << m_httpClient.errorString();
        resp->code = 2;
        resp->msg = OPERATION_FAILED;
        return resp;
    }

    // Parse {"code": ..., "message": ...} out of the reply body.
    QByteArray && reply = m_httpClient.text().toUtf8();
    QVariantMap map = QJsonDocument::fromJson(reply.data()).object().toVariantMap();
    resp->code = map["code"].toInt();
    resp->msg = map["message"].toString();
    return resp;
}
vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &number,QString sn,qint64 time){
httpUrl.append("/api/v1.0/recongnition/population");
QJsonObject json;
......
......@@ -36,6 +36,9 @@ public:
//人数变化推送
vides_data::response *httpPostFacePopulation(QByteArray &img,int &number,QString sn,qint64 time);
//工服推送
vides_data::response *httpPostUniforms(QByteArray &img,int &number,QString sn,qint64 time);
//客户端组列表
vides_data::response *httpFindStream(QString &serialNumber);
......
......@@ -15,10 +15,34 @@ int HumanDetection::findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector
// 执行一帧目标检测
TCV_HumanDetectorProcessFrame(detector, stream);
int num = (res == 0) ? TCV_HumanDetectorGetNumOfHuman(detector) :TCV_HumanDetectorGetNumOfCar(detector);
qDebug() << (res == 0 ? "Number of people detected:" : "Number of cars detected:") << num;
TCV_ReleaseCameraStream(stream);
int num=-1;
if(res==0x00 || res==0x02){
num= TCV_HumanDetectorGetNumOfHuman(detector);
if (num > 0 && res==0x02) {
// 创建一个接收检测结果的对象数组
TCV_ObjectLocation result[num];
// 提取行人检测结果
TCV_HumanDetectorGetHumanLocation(detector, result, num);
int num_uniforms = 0;
//工服
for (int i = 0; i < num; ++i) {
if (result[i].uniform == 0) {
++num_uniforms;
}
}
num=num_uniforms;
}
qDebug() << (res == 0 ? "Number of people detected:" : "Number of people with uniform == 0 detected:") << num;
}else if (res==0x01) {
num=TCV_HumanDetectorGetNumOfCar(detector);
qDebug() << "Number of cars detected:" << num;
}else {
qDebug() << "参数错误";
}
TCV_ReleaseCameraStream(stream);
return num;
}
......@@ -21,6 +21,7 @@ void ParkingSpaceInfo::removeQueue(){
queuels.dequeue();
}
}
void ParkingSpaceInfo::removeNoQueue() {
QMutexLocker locker(&queueMutex);
if (!queuels.isEmpty() && queuels.size() > 3) {
......
......@@ -23,14 +23,27 @@ struct response
QString msg;
response() {}
};
// Identity and version info reported upstream for one attached camera
// (embedded in the device-status ping payload).
struct requestCameraInfo{
// Camera IP address.
QString ip_addr;
// Camera firmware version string.
QString firmware_version;
// Camera serial number.
QString sSn;
requestCameraInfo() {}
};
// Status payload the box reports to the cloud ping endpoint.
struct requestDeviceStatus
{
// Box serial number.
QString sSn;
// Device type code.
int8_t type;
// Online/offline state flag.
int8_t status;
// Box IP address.
QString ip_addr;
// Box application/firmware version.
QString firmware_version;
// Info for every camera currently attached to this box.
std::list<requestCameraInfo>camera_info_list;
requestDeviceStatus() {}
};
struct responseStsCredentials{
QString access_key_id;
QString access_key_secret;
......@@ -67,10 +80,12 @@ struct responseDeviceStatus
QString sSn;
int8_t type;
int8_t merchant_id;
bool is_reboot;
std::list<responseArea>areas;
responseDeviceStatus() {}
responseDeviceStatus() : is_reboot(false) {}
};
struct responseDeviceData{
int algorithm ;
std::list<responseDeviceStatus> list;
responseStsCredentials sts_credentials;
};
......@@ -257,17 +272,20 @@ inline bool pingAddress(const QString &address) {
return output.contains("1 packets transmitted, 1 received");
}
inline QString getSerialNumber() {
QProcess process;
// 使用管道将两个命令的执行结果串联起来,直接查找包含"Serial"的行
process.start("bash", QStringList() << "-c" << "cat /proc/cpuinfo | grep Serial");
process.waitForFinished(-1); // 等待命令执行完成
inline QString getCpuSerialNumber() {
QProcess process;
// 启动一个进程运行shell命令
process.start("sh", QStringList() << "-c" << "cat /proc/cpuinfo | grep Serial");
// 等待命令执行完成
process.waitForFinished();
// 读取命令的标准输出
QString output = process.readAllStandardOutput();
QString serialNumber;
if (!output.isEmpty()) {
// 已经确保了输出仅包含 Serial 行,所以直接分割并提取
serialNumber = output.split(":").at(1).trimmed();
serialNumber = output.split(":").last().trimmed();
}
return serialNumber;
......
......@@ -3,7 +3,7 @@ QT += core gui network multimedia sql concurrent
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
CONFIG += c++11
TARGET = GAMERAVIDEO
TARGET = cameravideo
TEMPLATE = app
# The following define makes your compiler emit warnings if you use
......@@ -11,45 +11,46 @@ TEMPLATE = app
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
DEFINES += APP_VERSION=\\\"1.0.0\\\"
QMAKE_LIBDIR += /usr/local/lib
INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface
INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK
INCLUDEPATH+=/usr/local/include/human
#unix:contains(QMAKE_HOST.arch, x86_64) {
# QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
#}
#unix:contains(QMAKE_HOST.arch, arm) {
# QMAKE_LIBDIR += /usr/local/lib
#}
## 根据编译器类型选择库路径和头文件路径
#unix: {
# # x86 架构
# contains(QMAKE_HOST.arch, x86_64) {
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
# }
# # ARM 架构
# contains(QMAKE_HOST.arch, arm) {
# INCLUDEPATH+=/usr/local/include/opencv4
# INCLUDEPATH+=/usr/local/include/hyperface
# INCLUDEPATH+=/usr/local/include/hyper
# INCLUDEPATH+=/usr/local/include/XNetSDK
# INCLUDEPATH+=/usr/local/include/human
# }
#}
#QMAKE_LIBDIR += /usr/local/lib
#INCLUDEPATH+=/usr/local/include/opencv4
#INCLUDEPATH+=/usr/local/include/hyperface
#INCLUDEPATH+=/usr/local/include/hyper
#INCLUDEPATH+=/usr/local/include/XNetSDK
#INCLUDEPATH+=/usr/local/include/human
#INCLUDEPATH+=/usr/local/include/CImg
unix:contains(QMAKE_HOST.arch, x86_64) {
QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
}
unix:contains(QMAKE_HOST.arch, arm) {
QMAKE_LIBDIR += /usr/local/lib
}
# 根据编译器类型选择库路径和头文件路径
unix: {
# x86 架构
contains(QMAKE_HOST.arch, x86_64) {
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/CImg
}
# ARM 架构
contains(QMAKE_HOST.arch, arm) {
INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface
INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK
INCLUDEPATH+=/usr/local/include/human
}
}
# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
......
......@@ -31,11 +31,12 @@ MainWindow::MainWindow()
FaceReconition &faceRecognition = FaceReconition::getInstance();
float confidence=qSetting->value("devices/confidence").toFloat();
int faceNumbers=qSetting->value("devices/faceNumbers").toInt();
if(localImageMap.size()>0){
faceRecognition.initSourceImageMap(localImageMap,confidence);
faceRecognition.initSourceImageMap(localImageMap,faceNumbers,confidence);
}
float carConfidence=qSetting->value("devices/carConfidence").toFloat();
//LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance();
//licensePlateRecogn.initHlprContext(modelPaths,qSetting->value("licensePlateRecognition/car_cascade_path").toString(),carConfidence);
QString httpurl;
......@@ -75,6 +76,16 @@ MainWindow::MainWindow()
}
}
// Look up the CameraHandle whose serial number matches `sn`.
// Returns nullptr when no registered handle matches.
CameraHandle* MainWindow::findHandle(QString sn){
    for (const auto& entry : faceDetectionParkingPushs) {
        if (entry.second->getSSn() == sn) {
            return entry.second;
        }
    }
    return nullptr;
}
void MainWindow::sendJsonResponse(QTcpSocket* socket, int code, const QString& data, const QString& msg){
QJsonObject jsonResponse;
jsonResponse["code"] = code;
......@@ -257,8 +268,10 @@ void MainWindow::updateLocalFace(const QString &httpurl) {
faceRecognition.featureRemove();
} else {
float confidence=qSetting->value("devices/confidence").toFloat();
int faceNumbers=qSetting->value("devices/faceNumbers").toInt();
qDebug()<<"startMap != endMap-->";
faceRecognition.initSourceImageMap(localImageMap,confidence);
faceRecognition.initSourceImageMap(localImageMap,faceNumbers, confidence);
}
}
instance.deleteObj(res);
......@@ -268,12 +281,11 @@ void MainWindow::findLocalSerialNumber(QString &serialNumber){
if(vides_data::isVirtualMachine()){
serialNumber = QSysInfo::machineUniqueId();
}else{
serialNumber =vides_data::getSerialNumber();
if (!serialNumber.isEmpty()) {
qDebug() << "CPU Serial Number:" << serialNumber;
} else {
qDebug() << "CPU Serial Number not found!";
return;
if(localSn.length()>0){
serialNumber=localSn;
}else {
serialNumber =vides_data::getCpuSerialNumber();
localSn=serialNumber;
}
}
}
......@@ -306,21 +318,17 @@ void MainWindow::startCamera(const QString &httpurl){
// QString serialNumber = QSysInfo::machineUniqueId();
QString serialNumber;
findLocalSerialNumber(serialNumber);
qInfo()<<"serialNumber==:"<<serialNumber;
vides_data::requestDeviceStatus reStatus;
reStatus.sSn=serialNumber;
reStatus.status=1;
reStatus.type=1;
reStatus.ip_addr=instace.GetLocalIp();
reStatus.firmware_version=APP_VERSION;
HttpService httpService(httpurl);
vides_data::response *res=httpService.httpPostDeviceStatus(reStatus);
if(res->code!=0){
qInfo()<<"盒子状态上报失败 code:"<<res->code<<"msg:"<<res->data;
}
instace.deleteObj(res);
httpService.setHttpUrl(httpurl);
vides_data::response *re= httpService.httpFindCameras(serialNumber,devices);
if(re->code==0 || re->code==20004){
QString username = qSetting->value("devices/username").toString();
......@@ -331,6 +339,7 @@ void MainWindow::startCamera(const QString &httpurl){
instace.deleteObj(re);
return ;
}
int alg=devices.algorithm;
for (const auto& device : devices.list) {
if(localDevices.count(device.sSn)>0 ){
vides_data::localDeviceStatus* localDevice= localDevices.at(device.sSn);
......@@ -350,17 +359,31 @@ void MainWindow::startCamera(const QString &httpurl){
parameter.sSn=device.sSn;
//parameter.rtspUrl="rtsp://192.168.10.131:554/user=admin&password=&channel=1&stream=1.sdp?";
//parameter.rtspUrl=std::move(QString("rtsp://admin:@%1/stream1").arg(ipAddress));
this->initCameras(parameter,device.areas);
this->initCameras(parameter,device.areas,alg,reStatus.camera_info_list);
}
else {
CameraHandle *indexHandle=findHandle(device.sSn);
if(indexHandle!=nullptr &&device.is_reboot){
indexHandle->deviceReboot();
}else {
auto it = this->faceDetectionParkingPushs.find(key);
if (it != this->faceDetectionParkingPushs.end()) {
CameraHandle* offlineCameraHandle = it->second; // 注意使用->second获取值
vides_data::requestCameraInfo camera_info;
camera_info.sSn=offlineCameraHandle->getSSn();
offlineCameraHandle->findIp(camera_info.ip_addr);
offlineCameraHandle->findFirmwareVersion(camera_info.firmware_version);
reStatus.camera_info_list.push_front(camera_info);
__uint8_t new_algorithm= intToUint8t(alg);
offlineCameraHandle->initAlgorithmPermissions(new_algorithm);
if(!offlineCameraHandle->compareLists(device.areas)){
offlineCameraHandle->updateParkMapAndParkingSpaceInfos(device.areas);
}
}
}
}
}
}
this->deleteCloudNotCamer(localDevices, devices.list);
......@@ -373,6 +396,13 @@ void MainWindow::startCamera(const QString &httpurl){
// 清空 localDevices 容器
localDevices.clear();
}
httpService.setHttpUrl(httpurl);
vides_data::response *res=httpService.httpPostDeviceStatus(reStatus);
if(res->code!=0){
qInfo()<<"盒子状态上报失败 code:"<<res->code<<"msg:"<<res->msg;
}
instace.deleteObj(res);
updateLocalFace(httpurl);
instace.deleteObj(re);
......@@ -696,7 +726,18 @@ void MainWindow::iniRecordingToString(QString &recorJson){
QJsonDocument jsonDocument(jsonArray);
recorJson = QString::fromUtf8(jsonDocument.toJson());
}
void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::list<vides_data::responseArea>&areas){
// Narrow the cloud-supplied algorithm bitmask into a byte.
// Out-of-range values are logged and replaced by 0x07 (bits 0-2 set,
// i.e. the three permission bits used elsewhere all enabled).
__uint8_t MainWindow::intToUint8t(int algorithm){
    if (algorithm < 0 || algorithm > 255) {
        qInfo()<<"Value out of range for conversion to __uint8_t";
        return 0x07;
    }
    return static_cast<__uint8_t>(algorithm);
}
void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::list<vides_data::responseArea>&areas,int algorithm,std::list<vides_data::requestCameraInfo>&camera_info_list){
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
float carConfidence=qSetting->value("devices/carConfidence").toFloat();
int image_save=qSetting->value("devices/image_save").toInt();
......@@ -717,8 +758,15 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
uint64 face_frequency=qSetting->value("devices/face_frequency").toULongLong();
cameraHandle->initSdkRealTimeDevSnapSyn(sdk_handle,synTime,face_frequency);
int seTime=qSetting->value("timer/semaphore_time").toInt();
cameraHandle->setTimeoutMs(seTime);
vides_data::requestCameraInfo camera_info;
camera_info.sSn=parameter.sSn;
camera_info.ip_addr=parameter.sDevId;
cameraHandle->findFirmwareVersion(camera_info.firmware_version);
camera_info_list.push_front(camera_info);
__uint8_t new_algorithm= intToUint8t(algorithm);
cameraHandle->initAlgorithmPermissions(new_algorithm);
cameraHandle->initParkingSpaceInfo(areas);
Common & instace= Common::getInstance();
......
......@@ -43,10 +43,13 @@ public:
void initFaceFaceRecognition();
void initCameras(vides_data::cameraParameters &parameter,const std::list<vides_data::responseArea>&areas);
void initCameras(vides_data::cameraParameters &parameter,
const std::list<vides_data::responseArea>&areas,int algorithm,std::list<vides_data::requestCameraInfo>&camera_info_list);
__uint8_t intToUint8t(int algorithm);
static MainWindow * sp_this;
CameraHandle* findHandle(QString sn);
void sendJsonResponse(QTcpSocket* socket, int code, const QString& data, const QString& msg);
......@@ -103,6 +106,7 @@ private:
QTcpServer server;
QString localSn;
//本地id:图片路径
std::map<QString,QString>localImageMap;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment