Commit 22c70dd0 by 郭峰

Merge branch 'release' into 'master'

Release merged into master

See merge request !25
parents 6d4d10ba 27ead076
......@@ -49,10 +49,11 @@ void AlgorithmTaskManage::initHumanDetectionManage(const QString &modelPaths,
humanDetections.emplace_back(human);
}
}
void AlgorithmTaskManage::initLicensePlateManage(const QString &modelPaths,
float carConfidence){
void AlgorithmTaskManage::initLicensePlateManage(const QString &modelPaths,bool is_high,int maxNum,bool useHalf,
float boxThreshold,float nmsThreshold,float recThreshold){
for (int i = 0; i < licensePlateLen; ++i) {
LicensePlateRecognition* licensePlateRecognition=new LicensePlateRecognition(modelPaths,carConfidence);
LicensePlateRecognition* licensePlateRecognition=new LicensePlateRecognition(
modelPaths,is_high,maxNum,useHalf,boxThreshold,nmsThreshold,recThreshold);
licensePlateRecognitions.emplace_back(licensePlateRecognition);
}
......@@ -95,27 +96,38 @@ AlgorithmTaskManage::~AlgorithmTaskManage(){
}
}
void AlgorithmTaskManage::releaseResources(
int newHumanDetectionLen, int newLicensePlateLen, int newFaceLen,const QString &odelPaths,
float humanCarShapeConfidence,
int uniformColor,
float licensePlateCarConfidence,
std::map<QString,QString>& faceMaps,
int numberFaces,
float faceConfidence,
__uint8_t algorithmPermissions) {
void AlgorithmTaskManage::releaseResources(const vides_data::DetectionParams& params) {
Common & instance = Common::getInstance();
isShuttingDown.store(true, std::memory_order_release);
ScopeSemaphoreExit guard([this]() {
isShuttingDown.store(false, std::memory_order_release);
});
__uint8_t algorithmPermissions = params.algorithmPermissions;
qInfo()<<"修改参数:releaseResources "<<algorithmPermissions;
int newHumanDetectionLen = params.newHumanDetectionLen;
int newLicensePlateLen = params.newLicensePlateLen;
int newFaceLen = params.newFaceLen;
QString modelPath = params.modelPaths;
float humanCarShapeConfidence = params.humanCarShapeConfidence;
int uniformColor = params.uniformColor;
std::map<QString, QString> faceMaps = params.faceMaps;
int numberFaces = params.numberFaces;
float faceConfidence = params.faceConfidence;
bool high = params.isHigh;
int maxNum = params.maxNum;
bool useHalf = params.useHalf;
float boxThreshold = params.boxConfThreshold;
float nmsThreshold = params.nmsThreshold;
float recThreshold = params.recConfidenceThreshold;
// Update the work-uniform detection algorithm parameters
if ((algorithmPermissions & 0x01 << 2) != 0) {
resetSemaphoreAndClearObjects(instance,semaphore, humanDetections, humanDetectionLen);
initialize(newHumanDetectionLen, newLicensePlateLen, newFaceLen, false, 0x00);
initHumanDetectionManage(odelPaths, humanCarShapeConfidence, uniformColor);
initHumanDetectionManage(modelPath, humanCarShapeConfidence, uniformColor);
}
// Update the face recognition algorithm parameters
......@@ -129,7 +141,7 @@ void AlgorithmTaskManage::releaseResources(
if ((algorithmPermissions & 0x01) != 0) {
resetSemaphoreAndClearObjects(instance,plateSemaphore, licensePlateRecognitions, licensePlateLen);
initialize(newHumanDetectionLen, newLicensePlateLen, newFaceLen, false, 0x01);
initLicensePlateManage(odelPaths, licensePlateCarConfidence);
initLicensePlateManage(modelPath,high,maxNum,useHalf,boxThreshold,nmsThreshold,recThreshold);
}
}
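For reference, a minimal call-site sketch of the refactored API (not part of this commit). The DetectionParams fields and releaseResources(const vides_data::DetectionParams&) are taken from this diff; cloudConfig (a vides_data::responseConfig), taskManage and the other local variables are hypothetical and shown only for illustration.
// Hypothetical caller: pack the per-algorithm settings into one struct.
vides_data::DetectionParams params;
params.newHumanDetectionLen = cloudConfig.uniformConfig.humanDetectionLen;
params.newLicensePlateLen = cloudConfig.licensePlateConfig.licensePlateLen;
params.newFaceLen = newFaceLen; // hypothetical local
params.modelPaths = modelPaths; // hypothetical local
params.humanCarShapeConfidence = cloudConfig.uniformConfig.carShapeConfidence;
params.uniformColor = cloudConfig.uniformConfig.uniformColor;
params.faceMaps = faceMaps; // hypothetical local
params.numberFaces = numberFaces; // hypothetical local
params.faceConfidence = faceConfidence; // hypothetical local
params.algorithmPermissions = algorithmPermissions; // hypothetical local
params.isHigh = cloudConfig.licensePlateConfig.isHigh;
params.maxNum = cloudConfig.licensePlateConfig.maxNum;
params.useHalf = cloudConfig.licensePlateConfig.useHalf;
params.boxConfThreshold = cloudConfig.licensePlateConfig.boxConfThreshold;
params.nmsThreshold = cloudConfig.licensePlateConfig.nmsThreshold;
params.recConfidenceThreshold = cloudConfig.licensePlateConfig.recConfidenceThreshold;
taskManage->releaseResources(params);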
......
......@@ -28,8 +28,8 @@ public:
void initHumanDetectionManage(const QString &modelPaths,
float carShapeConfidence,int &uniformColor);
void initLicensePlateManage(const QString &modelPaths,
float carConfidence);
void initLicensePlateManage(const QString &modelPaths,bool is_high,int maxNum,bool useHalf,
float boxThreshold,float nmsThreshold,float recThreshold);
void modifyImageFeature(std::map<QString,QString>&maps,int numberFaces,float confidence,bool isNull);
void initFaceReconitionHandle(std::map<QString,QString>&maps,int numberFaces,float confidence);
......@@ -37,14 +37,7 @@ public:
void *schedulingAlgorithm(int scheType);
void releaseResources(int newHumanDetectionLen, int newLicensePlateLen, int newFaceLen, const QString &odelPaths,
float humanCarShapeConfidence,
int uniformColor,
float licensePlateCarConfidence,
std::map<QString,QString>& faceMaps,
int numberFaces,
float faceConfidence,
__uint8_t algorithmPermissions);
void releaseResources(const vides_data::DetectionParams& params);
......
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
......@@ -97,10 +97,14 @@ public:
void sdkEncodeCfg(const char *enCode);
// GB28181 update
void sdkDevSpvMn(const char* spvMn);
// Enable GB28181
int updateSdkDevStatus(bool status);
void updateSdkDevSpvMn(vides_data::responseGb28181 *gb28181);
// Reboot the device
void deviceReboot(bool isCloseHandle );
int deviceReboot();
// Shut down the device
int deviceShutdown();
// Get the firmware version
void findFirmwareVersion(QString &firmwareVersion);
......
......@@ -37,7 +37,7 @@ cv::Mat FaceReconitionHandle::loadImage(const QString &path) {
std::string stdPath = path.toStdString();
cv::Mat image = cv::imread(stdPath, cv::IMREAD_COLOR);
if (!image.empty()) {
qDebug() << "图像以OpenCV成功加载。";
qInfo() << "图像以OpenCV成功加载。";
return image;
}
......@@ -151,7 +151,7 @@ void FaceReconitionHandle::featureRemove(){
if(customIds.size()>0){
for(auto customId:customIds){
HResult ret= HF_FeaturesGroupFeatureRemove(ctxHandle,customId);
qDebug()<<"ret:featureRemove "<<ret;
qInfo()<<"ret:featureRemove "<<ret;
}
setImageChanged(false);
}
......@@ -180,10 +180,10 @@ cv::Mat FaceReconitionHandle::loadImageFromByteStream(const QString& filePath) {
}
return mat;
} catch (const CImgException& e) {
qDebug() << "CImg Error: " << e.what();
qInfo() << "CImg Error: " << e.what();
return cv::Mat();
} catch (const cv::Exception& e) {
qDebug() << "OpenCV Error: " << e.what();
qInfo() << "OpenCV Error: " << e.what();
return cv::Mat();
}
......@@ -218,7 +218,7 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
if (multipleFaceData.detectedNum <= 0) {
qDebug()<<QString("search 未检测到人脸");
qInfo()<<QString("search 未检测到人脸");
return ;
}
......@@ -227,11 +227,11 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
HInt32 featureNum;
HF_GetFeatureLength(ctxHandle, &featureNum);
for(int j=0;j< multipleFaceData.detectedNum; ++j){
qDebug()<<QString("doesItExistEmployee==>面部索引: %1").arg(j);
qInfo()<<QString("doesItExistEmployee==>面部索引: %1").arg(j);
std::vector<float> newfeature(featureNum,0.0f);
ret = HF_FaceFeatureExtractCpy(ctxHandle, imageSteamHandle, multipleFaceData.tokens[j], newfeature.data());
if(ret != HSUCCEED) {
qDebug()<<QString("特征提取出错: %1").arg(ret);
qInfo()<<QString("特征提取出错: %1").arg(ret);
HF_ReleaseImageStream(imageSteamHandle);
return ;
}
......@@ -252,9 +252,9 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
return ;
}
qDebug()<<QString("搜索置信度: %1").arg(confidence);
qDebug()<<QString("匹配到的tag: %1").arg(searchIdentity.tag);
qDebug()<<QString("匹配到的customId: %1").arg(searchIdentity.customId);
qInfo()<<QString("搜索置信度: %1").arg(confidence);
qInfo()<<QString("匹配到的tag: %1").arg(searchIdentity.tag);
qInfo()<<QString("匹配到的customId: %1").arg(searchIdentity.customId);
// Face Pipeline
//printf("人脸特征数量: %d", faceNum);
if (confidence > configConfidence) {
......
File mode changed from 100644 to 100755
......@@ -53,7 +53,7 @@ vides_data::response* HttpService::httpPostDeviceStatus(vides_data::requestDevic
resp->code = map["code"].toInt();
resp->msg = map["message"].toString();
} else {
qDebug() << "httpPostDeviceStatus" << m_httpClient.errorCode();
qInfo() << "httpPostDeviceStatus" << m_httpClient.errorCode();
resp->code = 2;
resp->msg = m_httpClient.errorString();
}
......@@ -86,7 +86,7 @@ vides_data::response* HttpService::httpPostRecord(int id,int recongnition_type,Q
resp->code=map["code"].toInt();
resp->msg=map["message"].toString();
}else{
qDebug()<<m_httpClient.errorCode();
qInfo()<<m_httpClient.errorCode();
resp->code=2;
resp->msg=OPERATION_FAILED;
}
......@@ -159,7 +159,7 @@ vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_d
}
resp->msg=map["message"].toString();
}else{
qDebug()<<m_httpClient.errorCode();
qInfo()<<m_httpClient.errorCode();
resp->code=2;
resp->msg=OPERATION_FAILED;
}
......@@ -244,11 +244,11 @@ vides_data::response *HttpService::httpLicensePlateRecognition(vides_data::reque
// Add res to the result list or perform other processing
}
}else{
qDebug()<<"httpLicensePlateRecognition"<<m_httpClient.errorCode();
qDebug()<<"httpLicensePlateRecognition msg"<<m_httpClient.errorString();
qInfo()<<"httpLicensePlateRecognition"<<m_httpClient.errorCode();
qInfo()<<"httpLicensePlateRecognition msg"<<m_httpClient.errorString();
resp->code=2;
resp->msg=m_httpClient.errorString();
resp->msg=m_httpClient.errorCode();
}
return resp;
}
......@@ -282,7 +282,7 @@ vides_data::response* HttpService::httpFindFaceReconition(QString &serialNumber,
}
resp->msg=map["message"].toString();
}else{
qDebug()<<m_httpClient.errorCode();
qInfo()<<m_httpClient.errorCode();
resp->code=2;
resp->msg=OPERATION_FAILED;
}
......@@ -313,10 +313,10 @@ vides_data::response *HttpService::httpPostUniforms(QByteArray &img,QString &id,
resp->code=map["code"].toInt();
resp->msg=map["message"].toString();
}else{
qDebug()<<m_httpClient.errorCode();
qDebug()<<"httpPostUniforms"<<m_httpClient.errorString();
qInfo()<<m_httpClient.errorCode();
qInfo()<<"httpPostUniforms"<<m_httpClient.errorString();
resp->code=2;
resp->msg=OPERATION_FAILED;
resp->msg=m_httpClient.errorCode();
}
return resp;
}
......@@ -354,20 +354,20 @@ vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &h
resp->code=map["code"].toInt();
resp->msg=map["message"].toString();
}else{
qDebug()<<"httpPostFacePopulation===>";
qDebug()<<m_httpClient.errorCode();
qDebug()<<m_httpClient.errorString();
qDebug()<<"httpPostFacePopulation===>end";
qInfo()<<"httpPostFacePopulation===>";
qInfo()<<m_httpClient.errorCode();
qInfo()<<m_httpClient.errorString();
qInfo()<<"httpPostFacePopulation===>end";
resp->code=2;
resp->msg=OPERATION_FAILED;
resp->msg=m_httpClient.errorCode();
}
return resp;
}
vides_data::response *HttpService::httpPostFaceReconition(vides_data::requestFaceReconition & faceReconition){
httpUrl.append("/api/v1.0/recongnition/face");
qDebug()<<"httpPostFaceReconition"<<httpUrl;
qInfo()<<"httpPostFaceReconition"<<httpUrl;
QJsonObject json;
json.insert("id",QJsonValue::fromVariant(faceReconition.id.toInt()));
json.insert("img", QJsonValue::fromVariant(faceReconition.img));
......@@ -408,7 +408,7 @@ vides_data::response *HttpService::httpPostFaceReconition(vides_data::requestFac
resp->msg=map["message"].toString();
}else{
resp->code=2;
resp->msg=m_httpClient.errorString();
resp->msg=m_httpClient.errorCode();
//resp->msg=OPERATION_FAILED;
}
return resp;
......@@ -447,7 +447,7 @@ vides_data::response*HttpService::httpFindGb28181Config(QString &serialNumber){
resp->data=response;
resp->msg=map["message"].toString();
}else{
qDebug()<<m_httpClient.errorCode();
qInfo()<<m_httpClient.errorCode();
resp->code=2;
resp->msg=m_httpClient.errorString();
}
......@@ -518,13 +518,18 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
// Parse licensePlateConfig
QJsonObject licensePlateConfigObj = dataObj["licensePlateConfig"].toObject();
config.licensePlateConfig.isOn = licensePlateConfigObj["isOn"].toBool();
config.licensePlateConfig.carConfidence = licensePlateConfigObj["carConfidence"].toVariant().toFloat();
config.licensePlateConfig.recConfidenceThreshold = licensePlateConfigObj["recConfidenceThreshold"].toVariant().toFloat();
config.licensePlateConfig.carConfidenceMax = licensePlateConfigObj["carConfidenceMax"].toVariant().toFloat();
config.licensePlateConfig.carConfidenceMin = licensePlateConfigObj["carConfidenceMin"].toVariant().toFloat();
config.licensePlateConfig.licensePlateLen=licensePlateConfigObj["licensePlateLen"].toInt();
config.licensePlateConfig.updateAt = licensePlateConfigObj["updateAt"].toVariant().toULongLong();
config.licensePlateConfig.maxNum=licensePlateConfigObj["maxNum"].toInt();
config.licensePlateConfig.useHalf=licensePlateConfigObj["useHalf"].toBool();
config.licensePlateConfig.boxConfThreshold = licensePlateConfigObj["boxConfThreshold"].toVariant().toFloat();
config.licensePlateConfig.nmsThreshold = licensePlateConfigObj["nmsThreshold"].toVariant().toFloat();
config.licensePlateConfig.isHigh=licensePlateConfigObj["isHigh"].toBool();
// Parse uniformConfig
QJsonObject uniformConfigObj = dataObj["uniformConfig"].toObject();
......@@ -533,7 +538,10 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
config.uniformConfig.humanDetectionLen = uniformConfigObj["humanDetectionLen"].toInt();
config.uniformConfig.updateAt = uniformConfigObj["updateAt"].toVariant().toULongLong();
config.uniformConfig.carShapeConfidence = uniformConfigObj["carShapeConfidence"].toVariant().toFloat();
// Parse humanConfig
QJsonObject humanConfigObj = dataObj["humanConfig"].toObject();
config.humanConfig.isOn=humanConfigObj["isOn"].toBool();
config.humanConfig.updateAt = humanConfigObj["updateAt"].toVariant().toULongLong();
// Parse devicesConfig
QJsonObject devicesConfigObj = dataObj["camera"].toObject();
......@@ -547,16 +555,19 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
// Parse mqttConfig
QJsonObject mqttConfigObj = dataObj["mqttConfig"].toObject();
config.mqttConfig.address=mqttConfigObj["address"].toString();
config.mqttConfig.clientId=mqttConfigObj["clientId"].toString();
config.mqttConfig.qos=mqttConfigObj["qos"].toInt();
config.mqttConfig.timeout = mqttConfigObj["timeout"].toVariant().toULongLong();
config.mqttConfig.topic=mqttConfigObj["topic"].toString();
config.mqttConfig.username=mqttConfigObj["username"].toString();
config.mqttConfig.password=mqttConfigObj["password"].toString();
config.mqttConfig.updateAt=mqttConfigObj["updateAt"].toVariant().toULongLong();
resp->msg=map["message"].toString();
}else{
qDebug()<<m_httpClient.errorCode();
qInfo()<<"httpDeviceConfig;";
qInfo()<<m_httpClient.errorCode();
qInfo()<<m_httpClient.errorString();
resp->code=2;
resp->msg=m_httpClient.errorString();
}
......@@ -591,7 +602,7 @@ vides_data::response*HttpService::httpFindStream(QString &serialNumber){
resp->msg=map["message"].toString();
}else{
qDebug()<<m_httpClient.errorCode();
qInfo()<<m_httpClient.errorCode();
resp->code=2;
resp->msg=OPERATION_FAILED;
}
......@@ -615,8 +626,8 @@ vides_data::response *HttpService::httpDownload( const QString &filePath,QString
resp->code=map["code"].toInt();
resp->msg=map["message"].toString();
}else{
qDebug()<<m_httpClient.errorCode();
qDebug()<<m_httpClient.errorCode();
qInfo()<<m_httpClient.errorCode();
qInfo()<<m_httpClient.errorCode();
resp->code=2;
resp->msg=OPERATION_FAILED;
......@@ -639,7 +650,7 @@ vides_data::response*HttpService::httpUploadFile(const QString &filePath,QString
resp->code=map["code"].toInt();
resp->msg=map["message"].toString();
}else{
qDebug()<<m_httpClient.errorCode();
qInfo()<<m_httpClient.errorCode();
resp->code=2;
resp->msg=OPERATION_FAILED;
}
......
......@@ -87,7 +87,7 @@ bool HttpClient::downloadFile(QNetworkRequest request, const QString &filePath,
QFile file(filePa);
if (!file.open(QIODevice::WriteOnly)) {
qDebug() << "Failed to open file for writing";
qInfo() << "Failed to open file for writing";
reply->deleteLater(); // make sure the network reply is released
return false;
}
......@@ -98,7 +98,7 @@ bool HttpClient::downloadFile(QNetworkRequest request, const QString &filePath,
QObject::connect(reply, &QNetworkReply::finished, [&, reply]() {
if (reply->error() != QNetworkReply::NoError) {
qDebug() << "Download failed:" << reply->errorString();
qInfo() << "Download failed:" << reply->errorString();
file.remove(); // delete the incomplete file
} else {
success = true;
......@@ -120,15 +120,15 @@ bool HttpClient::uploadFile(QNetworkRequest request,const QString& accessKeyId,
bool success = false;
QFile *file = new QFile(filePath, this);
qDebug() << "filePath"<<filePath;
qInfo() << "filePath"<<filePath;
if (!file->open(QIODevice::ReadOnly)) {
qDebug() << "uploadFile Failed to open file for reading";
qInfo() << "uploadFile Failed to open file for reading";
return false;
}
QByteArray fileData = file->readAll();
if(fileData.isEmpty()){
qDebug() << "uploadFile = file.readAll()";
qInfo() << "uploadFile = file.readAll()";
return false;
}
......@@ -167,11 +167,11 @@ bool HttpClient::uploadFile(QNetworkRequest request,const QString& accessKeyId,
QEventLoop loop;
connect(reply, &QNetworkReply::finished, this, [reply, file,&loop, &success]() {
if (reply->error() == QNetworkReply::NoError) {
qDebug() << "Upload successful!";
qInfo() << "Upload successful!";
file->remove();
success=true;
} else {
qDebug() << "Upload failed:" << reply->errorString();
qInfo() << "Upload failed:" << reply->errorString();
}
file->close();
file->deleteLater();
......
......@@ -66,9 +66,9 @@ void HumanDetection::draw_human_on_image(const cv::Mat& image, const TCV_ObjectL
bool success = cv::imwrite(fileName.toStdString(), image);
if (success) {
qDebug() << "车型图片已成功保存至:" << fileName;
qInfo() << "车型图片已成功保存至:" << fileName;
} else {
qDebug() << "图片保存失败!";
qInfo() << "图片保存失败!";
}
}
......
......@@ -4,18 +4,23 @@
LicensePlateRecognition::LicensePlateRecognition(const QString &modelPaths, float carConfidence) {
LicensePlateRecognition::LicensePlateRecognition(const QString &modelPaths,bool is_high,int maxNum,
bool useHalf,float boxThreshold,float nmsThreshold,float recThreshold) {
HLPR_ContextConfiguration configuration = {0};
QByteArray && by_mpath=modelPaths.toUtf8();
char* m_path=by_mpath.data();
configuration.models_path = m_path;
configuration.max_num = 5;
configuration.det_level = DETECT_LEVEL_LOW;
configuration.use_half = false;
configuration.nms_threshold = 0.5f;
configuration.rec_confidence_threshold = carConfidence;
configuration.box_conf_threshold = 0.30f;
configuration.max_num = maxNum;
if(is_high){
configuration.det_level = DETECT_LEVEL_HIGH;
}else{
configuration.det_level = DETECT_LEVEL_LOW;
}
configuration.use_half = useHalf;
configuration.nms_threshold =nmsThreshold;
configuration.rec_confidence_threshold = recThreshold;
configuration.box_conf_threshold = boxThreshold;
configuration.threads = 1;
ctx = HLPR_CreateContext(&configuration);
}
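For reference, a short construction sketch using the new signature (the parameter order matches this diff). devConfig is a hypothetical vides_data::responseConfig filled by HttpService::httpDeviceConfig, and modelPaths a hypothetical QString.
// cfg.isHigh selects DETECT_LEVEL_HIGH, otherwise DETECT_LEVEL_LOW (see the constructor above).
const vides_data::LicensePlateConfig &cfg = devConfig.licensePlateConfig;
LicensePlateRecognition *lpr = new LicensePlateRecognition(
    modelPaths, cfg.isHigh, cfg.maxNum, cfg.useHalf,
    cfg.boxConfThreshold, cfg.nmsThreshold, cfg.recConfidenceThreshold);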
......@@ -77,7 +82,7 @@ void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const
} else {
type = types[results.plates[i].type];
}
qDebug()<<QString("车牌号:%1").arg(results.plates[i].code);
qInfo()<<QString("车牌号:%1").arg(results.plates[i].code);
QString plateResult = QString("第%1个,%2,车牌号:%3,置信度:%4,左上角点x坐标:%5,左上角点y坐标:%6,右下角点x坐标:%7,右下角点y坐标:%8")
.arg(i + 1).arg(type.c_str()).arg(results.plates[i].code)
.arg(results.plates[i].text_confidence).arg(results.plates[i].x1, 0, 'f', 4)
......@@ -163,7 +168,7 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
newPlate.new_color=QString::fromStdString(type);
QString car_nuber=QString::fromUtf8(results.plates[i].code);
replaceWith1And0(car_nuber);
qDebug()<<"I O (i o)大小写替换为 1 0结果:==>"<<car_nuber;
qInfo()<<"I O (i o)大小写替换为 1 0结果:==>"<<car_nuber;
newPlate.new_plate=car_nuber;
newPlate.text_confidence=results.plates[i].text_confidence;
vides_data::ParkingArea area;
......
......@@ -29,7 +29,9 @@ public:
void replaceWith1And0( QString &code);
LicensePlateRecognition(const QString &modelPaths,float carConfidence);
LicensePlateRecognition(const QString &modelPaths,bool is_high,int maxNum,
bool useHalf,float boxThreshold,float nmsThreshold,float recThreshold
);
LicensePlateRecognition();
......
......@@ -38,7 +38,7 @@ LogHandlerPrivate::LogHandlerPrivate() {
flushLogFileTimer.setInterval(1000); // TODO: could be read from a config file
flushLogFileTimer.start();
QObject::connect(&flushLogFileTimer, &QTimer::timeout, [] {
// qDebug() << QDateTime::currentDateTime().toString("yyyy-MM-dd hh:mm:ss"); // test: keep writing content to the log file
// qInfo() << QDateTime::currentDateTime().toString("yyyy-MM-dd hh:mm:ss"); // test: keep writing content to the log file
QMutexLocker locker(&LogHandlerPrivate::logMutex);
if (nullptr != logOut) {
logOut->flush();
......
......@@ -82,7 +82,7 @@ int MediaFaceImage::SdkSearchDevicesSyn(std::map<QString, vides_data::localDevic
printf("nCount:%d\r\n", nActualCount);
if (nActualCount <= 0)
{
qDebug() << QString("Search no Device");
qInfo() << QString("Search no Device");
delete[] pRet;
return -1;
}
......@@ -91,7 +91,7 @@ int MediaFaceImage::SdkSearchDevicesSyn(std::map<QString, vides_data::localDevic
{
for (int i = 0; i < nActualCount; i++)
{
qDebug() << QString("[%1][IP:%2.%3.%4.%5][SN:%6][Mac:%7]")
qInfo() << QString("[%1][IP:%2.%3.%4.%5][SN:%6][Mac:%7]")
.arg(i)
.arg(pRet[i].HostIP.c[0])
.arg(pRet[i].HostIP.c[1])
......@@ -139,7 +139,7 @@ int MediaFaceImage::SdkInit(QString &szConfigPath, QString &szTempPath) {
void MediaFaceImage::ParserImageData(const unsigned char* pData, int nDataLen, char* pJpg, int* nJpgLen, char* pJson)
{
quint32 nPicLen = static_cast<quint32>(pData[0]) | (static_cast<quint32>(pData[1]) << 8) | (static_cast<quint32>(pData[2]) << 16) | (static_cast<quint32>(pData[3]) << 24);
qDebug() << "nPicLen =" << nPicLen;
qInfo() << "nPicLen =" << nPicLen;
*nJpgLen = static_cast<int>(nPicLen);
memcpy(pJpg, (pData + 32), *nJpgLen);
const unsigned char* pInfoHead = nullptr;
......
......@@ -2,6 +2,7 @@
#define MQTTSUBSCRIBER_H
#include <MQTTClient.h>
#include <MQTTAsync.h>
#include <QTimer>
#include <QObject>
#include "VidesData.h"
......@@ -9,15 +10,25 @@ class MqttSubscriber : public QObject
{
Q_OBJECT
public:
static MqttSubscriber* getInstance(vides_data::MqttConfig& config, QObject* parent = nullptr); ~MqttSubscriber();
static MqttSubscriber* getInstance( QObject* parent = nullptr); ~MqttSubscriber();
void init(vides_data:: MqttConfig& config,QString &httpUrl,QString &serialNumber);
void start();
signals:
void connectionLostSignal();
private slots:
void reconnectAndFetchConfig();
private:
MqttSubscriber(vides_data:: MqttConfig& config, QObject* parent = nullptr);
MqttSubscriber(const MqttSubscriber&) = delete;
MqttSubscriber(QObject* parent = nullptr);
MqttSubscriber() = delete;
MqttSubscriber& operator=(const MqttSubscriber&) = delete;
QTimer *retryTimer;
MQTTAsync client;
QString httpUrl;
QString serialNumber;
vides_data::MqttConfig config;
......@@ -34,7 +45,7 @@ private:
void onPublishSuccess(MQTTAsync_successData* response);
void onPublishFailure(MQTTAsync_failureData* response);
void sendSubscriptionConfirmation(const std::string& messageId);
void sendSubscriptionConfirmation(const vides_data::requestMqttData& response);
static MqttSubscriber* instance;
};
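For reference, a minimal usage sketch of the reworked singleton: getInstance(), init() and start() are declared above; mqttCfg, httpUrl and serialNumber are assumed to come from the device-config response.
MqttSubscriber *subscriber = MqttSubscriber::getInstance();
subscriber->init(mqttCfg, httpUrl, serialNumber); // configuration is now injected after construction
subscriber->start();                              // presumably connects and subscribes to the topic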
......
File mode changed from 100644 to 100755
File mode changed from 100644 to 100755
......@@ -258,14 +258,19 @@ struct FaceConfig {
struct LicensePlateConfig {
bool isOn;
float carConfidence;
float carConfidenceMax;
float carConfidenceMin;
int licensePlateLen;
quint64 updateAt;
quint64 updateAt;
int maxNum; ///< maximum number of plates to recognize
bool useHalf; ///< whether to use half-precision inference
float boxConfThreshold; ///< detection-box confidence threshold
float nmsThreshold; ///< non-maximum suppression threshold
float recConfidenceThreshold; ///< recognition confidence threshold
bool isHigh;
};
struct UniformConfig {
bool isOn;
int uniformColor;
......@@ -274,6 +279,12 @@ struct UniformConfig {
quint64 updateAt;
};
struct HumanConfig{
bool isOn;
int humanDetectionLen;
quint64 updateAt;
};
struct MqttConfig {
QString address;
QString clientId;
......@@ -282,6 +293,7 @@ struct MqttConfig {
QString topic;
QString username;
QString password;
quint64 updateAt;
};
struct responseConfig {
......@@ -293,15 +305,38 @@ struct responseConfig {
UniformConfig uniformConfig;
Camera camera;
MqttConfig mqttConfig;
};
struct Devices{
QString id;
QString state;
HumanConfig humanConfig;
};
struct responseMqttData{
uint8_t msg_type;
std::list<Devices>devices;
QString sn;
QString uniq;
};
struct requestMqttData{
QString sn;
int code;
QString uniq;
};
struct DetectionParams {
int newHumanDetectionLen;
int newLicensePlateLen;
int newFaceLen;
QString modelPaths;
float humanCarShapeConfidence;
int uniformColor;
std::map<QString, QString> faceMaps;
int numberFaces;
float faceConfidence;
__uint8_t algorithmPermissions;
bool isHigh;
int maxNum; ///< maximum number of plates to recognize
bool useHalf; ///< whether to use half-precision inference
float boxConfThreshold; ///< detection-box confidence threshold
float nmsThreshold; ///< non-maximum suppression threshold
float recConfidenceThreshold; ///< recognition confidence threshold
};
inline bool isVirtualMachine()
......@@ -381,21 +416,28 @@ inline bool pingAddress(const QString &address) {
return false;
}
if (!process.waitForFinished(1000)) {
// use a longer timeout so the process has enough time to finish
if (!process.waitForFinished(3000)) {
process.kill(); // force-kill the process on timeout
return false;
}
QString output(process.readAllStandardOutput());
// More elaborate logic could be used here to parse the ping output (see the sketch after this function)
// Simple ping-success check
#ifdef Q_OS_WIN
return output.contains("TTL=");
#else
return output.contains("1 packets transmitted, 1 received");
#endif
}
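The Linux branch above matches one fixed summary string; a hypothetical, slightly more tolerant check (not part of this MR) could parse the received-packet count instead:
#include <QRegularExpression>

// Hypothetical helper: returns true when at least one ICMP reply was received.
inline bool pingOutputIndicatesSuccess(const QString &output) {
#ifdef Q_OS_WIN
    return output.contains("TTL=");
#else
    QRegularExpression re(QStringLiteral("(\\d+) received"));
    QRegularExpressionMatch m = re.match(output);
    return m.hasMatch() && m.captured(1).toInt() > 0;
#endif
}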
inline int GetCpuIdByAsm_arm(char* cpu_id)
{
FILE *fp = fopen("/proc/cpuinfo", "r");
if(nullptr == fp)
{
qDebug()<<"failed to open cpuinfo";
qInfo()<<"failed to open cpuinfo";
return -1;
}
......@@ -431,7 +473,7 @@ inline int GetCpuIdByAsm_arm(char* cpu_id)
inline QString getCpuSerialNumber() {
QFile file("/proc/cpuinfo");
if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) {
qDebug() << "无法打开 /proc/cpuinfo 文件";
qInfo() << "无法打开 /proc/cpuinfo 文件";
return QString();
}
QTextStream in(&file);
......
File added
......@@ -12,8 +12,7 @@ TEMPLATE = app
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
DEFINES += APP_VERSION=\\\"1.3.0\\\"
DEFINES += APP_VERSION=\\\"1.3.2\\\"
QMAKE_LIBDIR += /usr/local/lib
......@@ -25,6 +24,9 @@ INCLUDEPATH+=/usr/local/include/human
INCLUDEPATH+=/usr/local/include/CImg
INCLUDEPATH+=/usr/local/include/mqtt
# Disable all compiler warnings
QMAKE_CXXFLAGS += -w
#unix:contains(QMAKE_HOST.arch, x86_64) {
# QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
#}
......@@ -123,13 +125,12 @@ HEADERS += \
BaseAlgorithm.h \
MqttSubscriber.h
#FORMS += \
# mainwindow.ui
# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target
RESOURCES += \
BG.qrc
#RESOURCES += \
# BG.qrc
......@@ -38,13 +38,15 @@ public:
void createDirectory(int flag,const QString& dirName, const QString& successMsg, const QString& failureMsg);
void runOrRebootMqtt(vides_data::MqttConfig &mqtt_config,QString &httpUrl,QString &serialNumber);
void initFaceFaceRecognition();
void initCameras(vides_data::cameraParameters &parameter, vides_data::responseConfig &devConfig, const std::list<vides_data::responseArea>&areas,std::list<vides_data::requestCameraInfo>&camera_info_list);
__uint8_t intToUint8t(bool faceAlgorithm,bool licensePlateAlgorithm,bool uniformAlgorithm);
__uint8_t intToUint8t(bool faceAlgorithm,bool licensePlateAlgorithm,bool uniformAlgorithm,bool humanAlgorithm);
// Box (edge device) parameter update
void divParameterUpdate(vides_data::responseConfig &cloudConfig );
void divParameterUpdate(vides_data::responseConfig &cloudConfig,QString &httpUrl,QString &serialNumber );
static MainWindow * sp_this;
......