Commit 5fe7062a by “liusq”

Add algorithm filtering and mark regions

parent 476b9ed9
......@@ -11,6 +11,7 @@
#include "Json_Header/System_TimeZone.h"
#include "Json_Header/RecordCfg.h"
#include "Json_Header/NetWork_SPVMN.h"
#include "Json_Header/NetWork_Wifi.h"
#include "Json_Header/SystemInfo.h"
#include "Json_Header/OPMachine.h"
#include "mainwindow.h"
......@@ -40,8 +41,9 @@ enum CAR_INFORMATION {
class CameraHandle: public QObject {
Q_OBJECT
public:
CameraHandle(QString &url,QString &httpUrl,QString &sSn, int &channel,const QString &modelPaths,
float carConfidence,int imageSave);
CameraHandle(QString &url,QString &httpUrl,QString &sSn, int &channel,
const QString &modelPaths,
float carConfidence,float carShapeConfidence, int imageSave);
CameraHandle();
~CameraHandle();
int sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
......@@ -54,19 +56,30 @@ public:
void clearCameraHandle();
void initAlgorithmParameter(float &height_reference);
// void rebindTimer(int hDevice);
void initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 face_frequency);
void updateImage(const cv::Mat & frame,qint64 currentTime);
void matToBase64(const cv::Mat &image, QByteArray &base64Data);
//Convert the original image into masks for the different regions
void matToAreaMask(const cv::Mat &source,std::map<int,cv::Mat> &maskFrame);
int callbackFunction(XSDK_HANDLE hObject,QString &szString);
void checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat & frame,RecognizedInfo& newInfo,int &result,std::map<int,RecognizedInfo>&exitAndMoMap);
void checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat & frame,RecognizedInfo& newInfo,int &result);
void licensePlateRecognitionResults(vides_data::requestLicensePlate &location);
void sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel);
void printWifi(XSDK_HANDLE hDevice,XSDK_CFG::NetWork_Wifi &cfg);
//Set the Wi-Fi network the camera connects to
void sdkWifi(QString &pwd,QString &ssid);
//Time settings
void sdkDevSystemTimeZoneSyn(QString &time);
//Recording settings
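For orientation, here is a minimal sketch of what the new matToAreaMask could do: rasterize each configured parking area into its own binary mask with OpenCV. This is an assumption about the approach, not the actual implementation; parkMap and getArea() are taken from declarations elsewhere in this commit.
// Hedged sketch: one CV_8UC1 mask per parking area, keyed by the area id.
void CameraHandle::matToAreaMask(const cv::Mat &source, std::map<int, cv::Mat> &maskFrame) {
    for (auto &entry : parkMap) { // parkMap: std::map<int, ParkingSpaceInfo*>
        vides_data::ParkingArea &a = entry.second->getArea();
        std::vector<std::vector<cv::Point>> polys(1);
        polys[0] = {
            cv::Point(cvRound(a.topLeftCornerX),     cvRound(a.topLeftCornerY)),
            cv::Point(cvRound(a.topRightCornerX),    cvRound(a.topRightCornerY)),
            cv::Point(cvRound(a.bottomRightCornerX), cvRound(a.bottomRightCornerY)),
            cv::Point(cvRound(a.bottomLeftCornerX),  cvRound(a.bottomLeftCornerY))
        };
        cv::Mat mask = cv::Mat::zeros(source.size(), CV_8UC1);
        cv::fillPoly(mask, polys, cv::Scalar(255)); // pixels inside the area become 255
        maskFrame[entry.first] = mask;
    }
}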
......@@ -79,19 +92,24 @@ public:
void deviceReboot();
//Get the firmware version
void findFirmwareVersion(QString &firmwareVersion);
//Get the IP address
void findIp(QString &ip);
void sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
QString startTimer,QString endTime);
void batchRegionalPushLicensePlate(QByteArray &imgs,qint64 currentTime,vides_data::requestLicensePlate &newPlate);
void faceUniformOverlap(std::map<QString,vides_data::requestFaceReconition>&mapFaces,
std::vector<vides_data::ParkingArea> &uniforms,
std::list<QString>&outUniforms);
bool isClockwise(const std::vector<cv::Point2f>& polygon);
QString getSSn();
int getMediaHandle();
void setMediaHandle(int mediaHandle);
void setCurrentFace(int currentFace);
void initAlgorithmPermissions(__uint8_t algorithm);
void initParkingSpaceInfo(const std::list<vides_data::responseArea>&areas);
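The new isClockwise above lends itself to the shoelace (signed-area) test; a minimal sketch under the usual convention, offered as an assumption since the implementation is not part of this diff:
// Hedged sketch: the sign of the shoelace sum gives the winding order.
// With a y-up coordinate system a positive sum means clockwise; in image
// coordinates (y grows downward) the interpretation is mirrored.
bool CameraHandle::isClockwise(const std::vector<cv::Point2f>& polygon) {
    double sum = 0.0;
    const size_t n = polygon.size();
    for (size_t i = 0; i < n; ++i) {
        const cv::Point2f &p = polygon[i];
        const cv::Point2f &q = polygon[(i + 1) % n];
        sum += static_cast<double>(q.x - p.x) * (q.y + p.y);
    }
    return sum > 0.0;
}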
......@@ -103,13 +121,16 @@ public:
std::map<QString, QString>&getCurrentData();
bool isChanged(const QPoint& newInfo, const QPoint& current);
// Check whether a point lies inside the polygon
bool polygonsOverlap(ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2);
// Compute the intersection area of two polygons
double calculateIntersectionArea(const QPolygonF &polygon1, const QPolygonF &polygon2);
double ccw(const QPointF& a, const QPointF& b, const QPointF& c);
void getCurrentFrame(std::vector<uchar> &buffer);
bool isAnyOverlap(ParkingSpaceInfo *parkArea,std::vector<vides_data::ParkingArea> &currentPlates);
int findPointRegion(ParkingSpaceInfo &prakArea);
int determineArea(ParkingSpaceInfo &prakArea);
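calculateIntersectionArea above can be built on Qt's polygon clipping plus the shoelace formula; a hedged sketch, not necessarily how the project implements it:
// Hedged sketch: clip the polygons with QPolygonF::intersected(), then
// measure the resulting overlap polygon with the shoelace formula.
double CameraHandle::calculateIntersectionArea(const QPolygonF &polygon1, const QPolygonF &polygon2) {
    QPolygonF overlap = polygon1.intersected(polygon2);
    double area = 0.0;
    for (int i = 0; i < overlap.size(); ++i) {
        const QPointF &p = overlap[i];
        const QPointF &q = overlap[(i + 1) % overlap.size()];
        area += p.x() * q.y() - q.x() * p.y();
    }
    return qAbs(area) / 2.0;
}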
signals:
......@@ -130,10 +151,7 @@ private :
SXSDKLoginParam *loginParam;
SXMediaFaceImageReq *sxMediaFaceImageReq;
std::mutex plateMutex;
std::mutex faceMutex;
QString sSn;
QString url;
std::map<int, vides_data::responseRecognitionData> videoCurrentData;
......@@ -144,8 +162,9 @@ private :
std::map<int,ParkingSpaceInfo*>parkMap;
//All parking space regions monitored by this camera
std::vector<ParkingSpaceInfo*>parkingSpaceInfos;
//Current face count
int currentFace;
//Current face count and worker count
QPoint faceMapWorker;
int mediaHandle;
//Snap an image every 2 seconds
......@@ -158,14 +177,14 @@ private :
P_HLPR_Context ctx ;
QSemaphore semaphore;
int image_save;
std::atomic<uint64> faceCount;
uint64 face_frequency;
__uint8_t algorithmPermissions;
};
#endif // CAMERAHANDLE_H
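The new algorithmPermissions member and initAlgorithmPermissions(__uint8_t algorithm) read like a per-camera bit mask for the algorithm filtering this commit introduces. The sketch below is purely illustrative; the actual bit assignments are not visible in this diff.
// Hedged sketch: hypothetical bit layout, used only to show the gating idea.
enum AlgorithmBits : __uint8_t {
    ALGO_LICENSE_PLATE = 0x01,  // assumed: license plate recognition enabled
    ALGO_FACE          = 0x02,  // assumed: face recognition enabled
    ALGO_UNIFORM       = 0x04   // assumed: uniform detection enabled
};

void CameraHandle::initAlgorithmPermissions(__uint8_t algorithm) {
    algorithmPermissions = algorithm; // store the mask for later checks
}

// Example gate before running a pipeline step:
// if (algorithmPermissions & ALGO_FACE) { /* run the face pipeline */ }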
......@@ -62,8 +62,8 @@ private:
QString videoOut;
QString videoDownload;
QString images;
double carConfidenceMax;
double carConfidenceMin;
float carConfidenceMax;
float carConfidenceMin;
Common();
~Common();
......
......@@ -209,6 +209,7 @@ void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_d
HInt32 featureNum;
HF_GetFeatureLength(ctxHandle, &featureNum);
for(int j=0;j< multipleFaceData.detectedNum; ++j){
qDebug()<<QString("doesItExistEmployee==>面部索引: %1").arg(j);
std::vector<float> newfeature(featureNum,0.0f);
ret = HF_FaceFeatureExtractCpy(ctxHandle, imageSteamHandle, multipleFaceData.tokens[j], newfeature.data());
if(ret != HSUCCEED) {
......
......@@ -15,7 +15,7 @@ HttpService::~HttpService() {
vides_data::response* HttpService::httpPostDeviceStatus(vides_data::requestDeviceStatus & deviceStatus) {
httpUrl.append("/api/v1.0/device/ping");
// Create the main JSON object
QJsonObject json;
json.insert("sn", deviceStatus.sSn);
......@@ -23,7 +23,7 @@ vides_data::response* HttpService::httpPostDeviceStatus(vides_data::requestDevic
json.insert("state", deviceStatus.status);
json.insert("ip_addr", deviceStatus.ip_addr);
json.insert("firmware_version", deviceStatus.firmware_version); // 将固件版本添加到主 JSON 对象中
// 创建摄像头信息列表 JSON 数组
QJsonArray cameraArray;
for (const auto& cameraInfo : deviceStatus.camera_info_list) {
......@@ -35,11 +35,11 @@ vides_data::response* HttpService::httpPostDeviceStatus(vides_data::requestDevic
}
// Add the camera info list to the main JSON object
json.insert("camera_info_list", cameraArray);
// Convert the JSON object into a JSON document
QJsonDocument jsonDoc(json);
QByteArray bytearr = jsonDoc.toJson(QJsonDocument::Compact);
vides_data::response *resp = new vides_data::response();
QNetworkRequest request;
request.setUrl(QUrl(httpUrl));
......@@ -116,7 +116,7 @@ vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_d
resp->code=map["code"].toInt();
QJsonObject dataObj = maps["data"].toObject();
// Handle the "sts_credentials" field
QJsonObject stsCredentialsObj = dataObj["sts_credentials"].toObject();
HttpService::stsCredentials.access_key_id=responseData.sts_credentials.access_key_id = stsCredentialsObj["access_key_id"].toString();
......@@ -146,12 +146,12 @@ vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_d
area.bottom_right_corner_y = areaObject["bottom_right_corner_y"].toDouble();
area.top_left_corner_x = areaObject["top_left_corner_x"].toDouble();
area.top_left_corner_y = areaObject["top_left_corner_y"].toDouble();
area.bottom_left_corner_x = areaObject["bottom_left_corner_x"].toDouble();
area.bottom_left_corner_y = areaObject["bottom_left_corner_y"].toDouble();
area.top_right_corner_x = areaObject["top_right_corner_x"].toDouble();
area.top_right_corner_y = areaObject["top_right_corner_y"].toDouble();
status.areas.push_back(area);
}
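The four corners parsed above are what later feed the polygon-based region logic. A hedged helper sketch, assuming vides_data::responseArea carries the same eight corner fields shown in the parsing code (the helper name is illustrative and not part of the code base):
// Hedged helper sketch: build a QPolygonF from the four parsed corners so the
// area can be used with polygonsOverlap()/calculateIntersectionArea().
static QPolygonF areaToPolygon(const vides_data::responseArea &area) {
    QPolygonF poly;
    poly << QPointF(area.top_left_corner_x,     area.top_left_corner_y)
         << QPointF(area.top_right_corner_x,    area.top_right_corner_y)
         << QPointF(area.bottom_right_corner_x, area.bottom_right_corner_y)
         << QPointF(area.bottom_left_corner_x,  area.bottom_left_corner_y);
    return poly;
}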
......@@ -182,28 +182,27 @@ vides_data::response *HttpService::httpLicensePlateRecognition(vides_data::reque
{"bottom_right_corner_y", plate.areaLocation.bottomRightCornerY},
{"top_left_corner_x", plate.areaLocation.topLeftCornerX},
{"top_left_corner_y", plate.areaLocation.topLeftCornerY},
{"bottom_left_corner_x", plate.areaLocation.bottomLeftCornerX},
{"bottom_left_corner_y", plate.areaLocation.bottomLeftCornerY},
{"top_right_corner_x", plate.areaLocation.topRightCornerX},
{"top_right_corner_y", plate.areaLocation.topRightCornerY}
};
item.insert("camera_location", cameraObject);
item.insert("img", QJsonValue::fromVariant(plate.img)); // 替换为真实的图像数据
QJsonObject locationObject {
{"bottom_right_corner_x", plate.recognition.bottomRightCornerX},
{"bottom_right_corner_y", plate.recognition.bottomRightCornerY},
{"top_left_corner_x", plate.recognition.topLeftCornerX},
{"top_left_corner_y", plate.recognition.topLeftCornerY},
{"bottom_left_corner_x", plate.recognition.bottomLeftCornerX},
{"bottom_left_corner_y", plate.recognition.bottomLeftCornerY},
{"top_right_corner_x", plate.recognition.topRightCornerX},
{"top_right_corner_y", plate.recognition.topRightCornerY}
};
item.insert("location", locationObject);
item.insert("new_color", plate.new_color); // 替换为真实的颜色数据
item.insert("new_plate", plate.new_plate); // 使用LicensePlate结构中的车牌号字段
......@@ -230,7 +229,7 @@ vides_data::response *HttpService::httpLicensePlateRecognition(vides_data::reque
QVariantMap map =std::move(maps.toVariantMap());
resp->code=map["code"].toInt();
resp->msg=map["message"].toString();
QJsonObject data = map["data"].toJsonObject();
QJsonArray dataList = data["list"].toArray(); // Get the "list" array
for (const auto& item : dataList) {
......@@ -245,9 +244,11 @@ vides_data::response *HttpService::httpLicensePlateRecognition(vides_data::reque
// Add res to the result list, or perform other processing
}
}else{
qDebug()<<m_httpClient.errorCode();
qDebug()<<"httpLicensePlateRecognition"<<m_httpClient.errorCode();
qDebug()<<"httpLicensePlateRecognition msg"<<m_httpClient.errorString();
resp->code=2;
resp->msg=OPERATION_FAILED;
resp->msg=m_httpClient.errorString();
}
return resp;
}
......@@ -288,17 +289,17 @@ vides_data::response* HttpService::httpFindFaceReconition(QString &serialNumber,
return resp;
}
vides_data::response *HttpService::httpPostUniforms(QByteArray &img,int &number,QString sn,qint64 time){
vides_data::response *HttpService::httpPostUniforms(QByteArray &img,QString &id,QString sn,qint64 time){
httpUrl.append("/api/v1.0/recongnition/uniform");
QJsonObject json;
json.insert("img", QJsonValue::fromVariant(img));
json.insert("sn",sn);
json.insert("number",number);
json.insert("id",id);
json.insert("time",QJsonValue::fromVariant(time));
QJsonDocument jsonDoc;
jsonDoc.setObject(json);
QByteArray bytearr= jsonDoc.toJson(QJsonDocument::Compact);
vides_data::response *resp=new vides_data::response();
QNetworkRequest request;
request.setUrl(QUrl(httpUrl));
......@@ -320,17 +321,26 @@ vides_data::response *HttpService::httpPostUniforms(QByteArray &img,int &number,
return resp;
}
vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &number,QString sn,qint64 time){
vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &human,int &worker,QString sn,qint64 time){
httpUrl.append("/api/v1.0/recongnition/population");
QJsonObject json;
json.insert("img", QJsonValue::fromVariant(img));
json.insert("sn",sn);
json.insert("number",number);
QJsonObject jsonObject;
jsonObject.insert("human",human);
jsonObject.insert("worker", worker);
// Use QJsonDocument to serialize it to a string
QJsonDocument humanData(jsonObject);
QString jsonString = QString::fromUtf8(humanData.toJson(QJsonDocument::Compact));
json.insert("desc",jsonString);
json.insert("time",QJsonValue::fromVariant(time));
QJsonDocument jsonDoc;
jsonDoc.setObject(json);
QByteArray bytearr= jsonDoc.toJson(QJsonDocument::Compact);
vides_data::response *resp=new vides_data::response();
QNetworkRequest request;
request.setUrl(QUrl(httpUrl));
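A hedged usage sketch of the updated httpPostFacePopulation (the httpService pointer and the literal values are illustrative). It shows the nested desc JSON that the code above builds from human and worker:
// Hedged usage sketch: report 5 people in view, 2 of them recognized as workers.
QByteArray img;                 // JPEG bytes of the current frame
int human = 5, worker = 2;
qint64 now = QDateTime::currentMSecsSinceEpoch();
vides_data::response *resp = httpService->httpPostFacePopulation(img, human, worker, "SN123456", now);
// The request body then contains, among other fields:
//   "desc": "{\"human\":5,\"worker\":2}"
delete resp;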
......@@ -359,23 +369,23 @@ vides_data::response *HttpService::httpPostFaceReconition(vides_data::requestFac
json.insert("img", QJsonValue::fromVariant(faceReconition.img));
json.insert("sn",faceReconition.sn);
json.insert("time",QJsonValue::fromVariant(faceReconition.time));
// Create the location object
QJsonObject location;
location.insert("bottom_right_corner_x", faceReconition.area.bottom_right_corner_x);
location.insert("bottom_right_corner_y", faceReconition.area.bottom_right_corner_y);
location.insert("top_left_corner_x",faceReconition.area.top_left_corner_x);
location.insert("top_left_corner_y",faceReconition.area.top_left_corner_y);
location.insert("bottom_left_corner_x", faceReconition.area.bottom_left_corner_x);
location.insert("bottom_left_corner_y", faceReconition.area.bottom_left_corner_y);
location.insert("top_right_corner_x",faceReconition.area.top_right_corner_x);
location.insert("top_right_corner_y",faceReconition.area.top_right_corner_y);
// Insert the location object into the main JSON object
json.insert("location", location);
QJsonDocument jsonDoc;
jsonDoc.setObject(json);
QByteArray bytearr= jsonDoc.toJson(QJsonDocument::Compact);
......@@ -384,7 +394,7 @@ vides_data::response *HttpService::httpPostFaceReconition(vides_data::requestFac
QNetworkRequest request;
request.setUrl(QUrl(httpUrl));
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
if(m_httpClient.post(request,bytearr)){
QByteArray && byte=m_httpClient.text().toUtf8();
QJsonDocument docujson= QJsonDocument::fromJson(byte.data());
......@@ -480,7 +490,7 @@ vides_data::response *HttpService::httpDownload( const QString &filePath,QString
QNetworkRequest request;
request.setUrl(url);
QString fileName = url.fileName();
QMutexLocker locker(&m_httpClientMutex);
if(m_httpClient.downloadFile(request,filePath,fullPathName,fileName)){
......
......@@ -34,10 +34,10 @@ public:
//Face recognition push
vides_data::response *httpPostFaceReconition(vides_data::requestFaceReconition & faceReconition);
//Headcount change push
vides_data::response *httpPostFacePopulation(QByteArray &img,int &number,QString sn,qint64 time);
vides_data::response *httpPostFacePopulation(QByteArray &img,int &human,int &worker,QString sn,qint64 time);
//Uniform detection push
vides_data::response *httpPostUniforms(QByteArray &img,int &number,QString sn,qint64 time);
vides_data::response *httpPostUniforms(QByteArray &img,QString &id,QString sn,qint64 time);
//客户端组列表
vides_data::response *httpFindStream(QString &serialNumber);
......@@ -57,6 +57,7 @@ private:
QString httpUrl;
HttpClient m_httpClient;
QMutex m_httpClientMutex;
};
......
#ifndef HUMANDETECTION_H
#define HUMANDETECTION_H
#include "VidesData.h"
#include "so_human_sdk.h"
#include <opencv2/opencv.hpp>
#include <QDebug>
......@@ -9,17 +10,21 @@ public:
HumanDetection();
~HumanDetection();
void initDetector();
int findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector *detector);
int findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector *detector,std::vector<vides_data::ParkingArea> &currentPlate);
static HumanDetection& getInstance()
{
static HumanDetection instance;
return instance;
}
void setHeightReference(float &height_reference);
void draw_human_on_image(const cv::Mat& image, const TCV_ObjectLocation* boxes, int size);
private:
static HumanDetection* instance;
//Height reference
float height_reference;
};
......
......@@ -46,7 +46,7 @@ void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const
char* m_path=by_mpath.data();
configuration.models_path = m_path;
configuration.max_num = 5;
configuration.det_level = DETECT_LEVEL_LOW;
configuration.det_level = DETECT_LEVEL_HIGH;
configuration.use_half = false;
configuration.nms_threshold = 0.5f;
configuration.rec_confidence_threshold = 0.8f;
......@@ -89,7 +89,29 @@ void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const
HLPR_ReleaseContext(ctx1);
}
void LicensePlateRecognition::replaceWith1And0(QString &code) {
code.replace(QRegularExpression("[Ii]"), "1");
code.replace(QRegularExpression("[Oo]"), "0");
}
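A short usage note for replaceWith1And0 above: it normalizes letters that OCR tends to confuse with digits (recognition stands for a LicensePlateRecognition instance; the sample string is illustrative):
// Hedged usage sketch: I/i become 1, O/o become 0.
QString code = "AB1O2Ii";
recognition->replaceWith1And0(code);
// code == "AB10211"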
void LicensePlateRecognition::filterLicensePlateConfidenceMax(vides_data::requestLicensePlate &plate, vides_data::LicensePlate &max) {
std::list<vides_data::LicensePlate> &plates = plate.plates; // 使用引用避免复制列表
if (plates.empty()) { // 检查列表是否为空
// 如果列表为空,可能需要设定一个默认值或者抛出异常,这里简单地不改变max
return;
}
max = plates.front(); // 初始化max为第一个元素
float maxConfidence = max.text_confidence;
for (auto it = plates.begin(); it != plates.end(); ++it) {
if (it->text_confidence > maxConfidence) {
max = *it; // 发现更高信心值的LicensePlate,更新max
maxConfidence = it->text_confidence;
}
}
}
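And a hedged usage sketch of filterLicensePlateConfidenceMax, picking the single highest-confidence plate out of a recognition result (variable names are illustrative):
// Hedged usage sketch: keep only the plate with the highest text_confidence.
vides_data::requestLicensePlate request;   // filled earlier by licensePlateNumber()
vides_data::LicensePlate best;
recognition->filterLicensePlateConfidenceMax(request, best);
if (!request.plates.empty()) {
    qDebug() << "best plate:" << best.new_plate << "confidence:" << best.text_confidence;
}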
void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime,P_HLPR_Context ctx) {
......@@ -130,7 +152,10 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
vides_data::LicensePlate newPlate;
newPlate.time=currentTime;
newPlate.new_color=QString::fromStdString(type);
newPlate.new_plate=QString::fromUtf8(results.plates[i].code);
QString car_nuber=QString::fromUtf8(results.plates[i].code);
replaceWith1And0(car_nuber);
qDebug()<<"I O (i o)大小写替换为 1 0结果:==>"<<car_nuber;
newPlate.new_plate=car_nuber;
newPlate.text_confidence=results.plates[i].text_confidence;
vides_data::ParkingArea area;
area.topLeftCornerX=results.plates[i].x1;
......
......@@ -9,6 +9,7 @@
#include <QFile>
#include <QImage>
#include <mutex>
#include <QRegularExpression>
const std::vector<std::string> types =
{"蓝牌", "黄牌单层", "白牌单层", "绿牌新能源", "黑牌港澳",
......@@ -24,16 +25,17 @@ public:
void licensePlateNumber(const cv::Mat &source,QString & lpNumber, vides_data::requestLicensePlate &plate,
qint64 currentTime,P_HLPR_Context ctx);
void filterLicensePlateConfidenceMax(vides_data::requestLicensePlate &plate,vides_data::LicensePlate &max);
void oldLicensePlateNumber(const cv::Mat &source,const QString &modelPaths,QString & lpNumber);
// void initHlprContext(const QString &modelPaths,const QString &carCascade,float carConfidence);
// void initHlprContext(const QString &modelPaths,const QString &carCascade,float carConfidence);
void replaceWith1And0( QString &code);
private:
static LicensePlateRecognition* instance;
//P_HLPR_Context ctx ;
float carConfidence;
std::mutex carMutex;
......
......@@ -58,7 +58,7 @@ static int sdkInitCallback(XSDK_HANDLE hObject, int nMsgId, int nParam1,
QString qString(szString);
CameraHandle* cameraHandle= mediaFaceImage->getCurrentDevice().at(hObject);
QThreadPool* threadPool = QThreadPool::globalInstance();
threadPool->setMaxThreadCount(12);
auto taskCallBack=std::bind(&CameraHandle::callbackFunction, cameraHandle, hObject, qString);
auto taskRunnable = new TaskRunnable(taskCallBack, hObject,cameraHandle->getChannel(), RunFunction::SdkCallbackFunction);
// task->setAutoDelete(false); // Make sure the task is not auto-deleted after it runs
......
......@@ -9,12 +9,15 @@ public:
ParkingSpaceInfo(RecognizedInfo & currentPlate);
ParkingSpaceInfo();
~ParkingSpaceInfo();
RecognizedInfo& getCurrentPlate();
void setCurrentPlate(RecognizedInfo & current);
void addQueue(RecognizedInfo &info);
void removeQueue();
void removeNoQueue();
QQueue<RecognizedInfo> &getQueue();
void setArea(vides_data::ParkingArea &a);
vides_data::ParkingArea &getArea();
......
......@@ -11,6 +11,10 @@
#include <QTextStream>
#include <QByteArray>
#include <QNetworkInterface>
#include <iostream>
#include <cstdio>
#include <string>
#include <sstream>
#include <list>
namespace vides_data{
constexpr const char *HEADER_TYPE_KAY="Content-Type";
......@@ -147,7 +151,7 @@ struct LicensePlate
QString new_color;
QByteArray img;
qint64 time;
ParkingArea recognition;
ParkingArea recognition;//Recognition area
float text_confidence;
LicensePlate() {}
};
......@@ -258,6 +262,7 @@ inline QString getDefaultGateway() {
#endif
return gateway;
}
inline bool pingAddress(const QString &address) {
QProcess process;
QString program = "ping";
......
QT += core gui network multimedia sql concurrent
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
CONFIG += c++11
......@@ -11,46 +12,46 @@ TEMPLATE = app
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
DEFINES += APP_VERSION=\\\"1.0.1\\\"
QMAKE_LIBDIR += /usr/local/lib
INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface
INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK
INCLUDEPATH+=/usr/local/include/human
INCLUDEPATH+=/usr/local/include/CImg
#unix:contains(QMAKE_HOST.arch, x86_64) {
# QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
#}
#unix:contains(QMAKE_HOST.arch, arm) {
# QMAKE_LIBDIR += /usr/local/lib
#}
## Select library and include paths based on the compiler type
#unix: {
# # x86 architecture
# contains(QMAKE_HOST.arch, x86_64) {
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
# INCLUDEPATH+=/home/mark/Public/x86_opencv/include/CImg
# }
# # ARM architecture
# contains(QMAKE_HOST.arch, arm) {
# INCLUDEPATH+=/usr/local/include/opencv4
# INCLUDEPATH+=/usr/local/include/hyperface
# INCLUDEPATH+=/usr/local/include/hyper
# INCLUDEPATH+=/usr/local/include/XNetSDK
# INCLUDEPATH+=/usr/local/include/human
# }
#}
DEFINES += APP_VERSION=\\\"1.0.2\\\"
#QMAKE_LIBDIR += /usr/local/lib
#INCLUDEPATH+=/usr/local/include/opencv4
#INCLUDEPATH+=/usr/local/include/hyperface
#INCLUDEPATH+=/usr/local/include/hyper
#INCLUDEPATH+=/usr/local/include/XNetSDK
#INCLUDEPATH+=/usr/local/include/human
#INCLUDEPATH+=/usr/local/include/CImg
unix:contains(QMAKE_HOST.arch, x86_64) {
QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
}
unix:contains(QMAKE_HOST.arch, arm) {
QMAKE_LIBDIR += /usr/local/lib
}
# Select library and include paths based on the compiler type
unix: {
# x86 architecture
contains(QMAKE_HOST.arch, x86_64) {
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/CImg
}
# ARM architecture
contains(QMAKE_HOST.arch, arm) {
INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface
INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK
INCLUDEPATH+=/usr/local/include/human
}
}
# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
......
......@@ -63,7 +63,17 @@ MainWindow::MainWindow()
connect(dePermissionSynTimer, &QTimer::timeout, this, [this, httpurl](){
this->startCamera(httpurl);
},Qt::QueuedConnection);
dePermissionSynTimer->start(dePermissionTimer);
this->startCamera(httpurl);
// Set the timer interval
dePermissionSynTimer->setInterval(dePermissionTimer);
// Start the timer
dePermissionSynTimer->start();
//dePermissionSynTimer->start(dePermissionTimer);
//vides_data::scanWiFiNetworks();
connect(&server, &QTcpServer::newConnection, this, &MainWindow::handleMatNewConnection);
......@@ -76,6 +86,7 @@ MainWindow::MainWindow()
}
}
CameraHandle* MainWindow::findHandle(QString sn){
for (auto it = faceDetectionParkingPushs.begin(); it != faceDetectionParkingPushs.end(); ++it) {
QString currentSn = it->second->getSSn();
......@@ -751,8 +762,11 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
float carConfidence=qSetting->value("devices/carConfidence").toFloat();
int image_save=qSetting->value("devices/image_save").toInt();
float heightReference=qSetting->value("devices/height_reference").toFloat();
float carShapeConfidence=qSetting->value("devices/carShapeConfidence").toFloat();
CameraHandle * cameraHandle =new CameraHandle(parameter.sDevId,parameter.httpUrl,parameter.sSn,parameter.channel,modelPaths,carConfidence,image_save);
CameraHandle * cameraHandle =new CameraHandle(parameter.sDevId,parameter.httpUrl,parameter.sSn,parameter.channel,modelPaths,carConfidence,carShapeConfidence,image_save);
int sdk_handle=cameraHandle->sdkDevLoginSyn(parameter.sDevId,parameter.nDevPort,parameter.sUserName,parameter.sPassword,10000);
qDebug()<<"句柄为2:"<<sdk_handle;
......@@ -763,11 +777,16 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::
initDevConfigSyn(cameraHandle);
mediaFaceImage->setMap(sdk_handle,cameraHandle);
cameraHandle->sdkDevSetAlarmListener(sdk_handle,1);
cameraHandle->sdkDevSetAlarmListener(sdk_handle,0);
int synTime=qSetting->value("timer/dev_snap_syn_timer").toInt();
uint64 face_frequency=qSetting->value("devices/face_frequency").toULongLong();
cameraHandle->initSdkRealTimeDevSnapSyn(sdk_handle,synTime,face_frequency);
cameraHandle->initAlgorithmParameter(heightReference);
QString pwd="admin2024";
QString sid="MERCURY_8C4F";
cameraHandle->sdkWifi(pwd,sid);
vides_data::requestCameraInfo camera_info;
camera_info.sSn=parameter.sSn;
camera_info.ip_addr=parameter.sDevId;
......
......@@ -50,7 +50,7 @@ public:
static MainWindow * sp_this;
CameraHandle* findHandle(QString sn);
void sendJsonResponse(QTcpSocket* socket, int code, const QString& data, const QString& msg);
void sendEmptyResponse(QTcpSocket* socket);
......