Commit 1542f525 by “liusq”

Modify the related log output format

parent c19e68b8
@@ -153,7 +153,7 @@ void* AlgorithmTaskManage::schedulingAlgorithm(int scheType) {
     } else if (scheType == 0x03) {
         return schedulingAlgorithmTemplate(faceReconitionHandles, mtxFace);
     } else {
-        qInfo() << "参数错误";
+        qDebug() << "参数错误";
         return nullptr;
     }
 }
@@ -170,10 +170,10 @@ void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,s
     if (selectedFaceReconition!=nullptr && !selectedFaceReconition->getImageChanged()) {
         selectedFaceReconition->setIsRunning(true);
         // 调用选定对象的doesItExistEmployee函数
-        qInfo() << "人脸识别算法抢到===>sn"<<sSn<<selectedFaceReconition;
-        selectedFaceReconition->doesItExistEmployee(source, faces);
+        qDebug() << "人脸识别算法抢到===>sn"<<sSn<<selectedFaceReconition;
+        selectedFaceReconition->doesItExistEmployee(sSn,source, faces);
     } else {
-        qInfo() << "没有可用的selectedFaceReconition对象可以调度";
+        qDebug() << "没有可用的selectedFaceReconition对象可以调度";
         return ;
     }
 }
@@ -192,10 +192,10 @@ void AlgorithmTaskManage::executeFindlicensePlateNumber(const cv::Mat &source, Q
     if (selectedLicensePlate!=nullptr) {
         selectedLicensePlate->setIsRunning(true);
         // 调用选定对象的findHuManCar函数
-        qInfo() << "车牌调度算法抢到===>sn"<<sSn<<selectedLicensePlate;
+        qDebug() << "车牌调度算法抢到===>sn"<<sSn<<selectedLicensePlate;
         selectedLicensePlate->licensePlateNumber(source, lpNumber,plate, currentTime);
     } else {
-        qInfo() << "没有可用的selectedLicensePlate对象可以调度";
+        qDebug() << "没有可用的selectedLicensePlate对象可以调度";
         return ;
     }
 }
@@ -214,11 +214,11 @@ int AlgorithmTaskManage::executeFindHuManCar(const cv::Mat &source, int res,
         selectedHumanDetection->setIsRunning(true);
         // 调用选定对象的findHuManCar函数
-        qInfo() << "人形调度算法抢到===>sn"<<sSn<<"res"<<selectedHumanDetection;
-        int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, heightReference,currentPlate);
+        qDebug() << "人形调度算法抢到===>sn"<<sSn<<"res"<<selectedHumanDetection;
+        int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, sSn,heightReference,currentPlate);
         return detectionResult;
     } else {
-        qInfo() << "没有可用的HumanDetection对象可以调度";
+        qDebug() << "没有可用的HumanDetection对象可以调度";
         return -2;
     }
 }
@@ -127,6 +127,10 @@ public:
     void setMediaHandle(int mediaHandle);
     void initAlgorithmPermissions(__uint8_t algorithm);
+    void initFaceFrequency(uint64 face_frequency);
     void initParkingSpaceInfo(const std::list<vides_data::responseArea>&areas);
     bool compareLists(const std::list<vides_data::responseArea>& newAreas);
......
@@ -192,7 +192,7 @@ cv::Mat FaceReconitionHandle::loadImageFromByteStream(const QString& filePath) {
-void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
+void FaceReconitionHandle::doesItExistEmployee(const QString &sn,const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
     thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
     ScopeSemaphoreExit streamGuard([this]() {
         isRunning.store(false, std::memory_order_release);
@@ -211,14 +211,14 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
     ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
     if (ret != HSUCCEED) {
-        qInfo()<<QString("image handle error:%1").arg((long) imageSteamHandle,0,10);
+        qInfo()<<QString("SN(%1): image handle error:%2").arg(sn).arg((long)imageSteamHandle,0,10);
         return ;
     }
     HF_MultipleFaceData multipleFaceData = {0};
     HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
     if (multipleFaceData.detectedNum <= 0) {
-        qInfo()<<QString("search 未检测到人脸");
+        qInfo()<<QString("SN(%1): 未检测到人脸").arg(sn);
         return ;
     }
@@ -227,11 +227,12 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
     HInt32 featureNum;
     HF_GetFeatureLength(ctxHandle, &featureNum);
     for(int j=0;j< multipleFaceData.detectedNum; ++j){
-        qInfo()<<QString("doesItExistEmployee==>面部索引: %1").arg(j);
+        //qInfo()<<QString("doesItExistEmployee==>面部索引: %1").arg(j);
+        qDebug() << QString("SN(%1): 面部索引:%2").arg(sn).arg(j);
         std::vector<float> newfeature(featureNum,0.0f);
         ret = HF_FaceFeatureExtractCpy(ctxHandle, imageSteamHandle, multipleFaceData.tokens[j], newfeature.data());
         if(ret != HSUCCEED) {
-            qInfo()<<QString("特征提取出错: %1").arg(ret);
+            qInfo() << QString("SN(%1): 特征提取出错:%2").arg(sn).arg(ret);
             HF_ReleaseImageStream(imageSteamHandle);
             return ;
         }
@@ -248,13 +249,12 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
     HFloat confidence;
     ret = HF_FeaturesGroupFeatureSearch(ctxHandle, feature, &confidence, &searchIdentity);
     if (ret != HSUCCEED) {
-        qInfo()<<QString("搜索失败: %1").arg(ret);
+        qInfo() << QString("SN(%1): 搜索失败:%2").arg(sn).arg(ret);
         return ;
     }
-    qInfo()<<QString("搜索置信度: %1").arg(confidence);
-    qInfo()<<QString("匹配到的tag: %1").arg(searchIdentity.tag);
-    qInfo()<<QString("匹配到的customId: %1").arg(searchIdentity.customId);
+    qInfo() << QString("SN(%1): 搜索置信度:%2").arg(sn).arg(confidence);
+    qInfo() << QString("SN(%1): 匹配到的tag:%2").arg(sn).arg(searchIdentity.tag);
+    qInfo() << QString("SN(%1): 匹配到的customId:%2").arg(sn).arg(searchIdentity.customId);
     // Face Pipeline
     //printf("人脸特征数量: %d", faceNum);
     if (confidence > configConfidence) {
@@ -275,7 +275,7 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
         // printf("image released");
     } else {
         //printf("image release error: %ld", ret);
-        qInfo()<<QString("image release error: %1").arg(ret);
+        qInfo() << QString("SN(%1): image release error:%2").arg(sn).arg(ret);
     }
 }
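
The recurring change in this file is the message prefix: every log line is now built as QString("SN(%1): ...").arg(sn), so output from different cameras can be told apart. Below is a reduced sketch of that pattern, not part of the commit; snTag() is an illustrative helper of ours, the committed code formats the prefix inline with QString::arg().

// Sketch only: the SN-tagged log format used in the hunks above.
// snTag() is a hypothetical helper; the real code builds the prefix inline.
#include <QDebug>
#include <QString>

static QString snTag(const QString &sn, const QString &msg) {
    return QString("SN(%1): %2").arg(sn, msg);
}

void reportSearch(const QString &sn, float confidence, int customId) {
    // qInfo() stays enabled in release builds; qDebug() lines are compiled out (see the .pro change below).
    qInfo().noquote() << snTag(sn, QString("搜索置信度:%1").arg(confidence));
    qInfo().noquote() << snTag(sn, QString("匹配到的customId:%1").arg(customId));
}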
@@ -33,7 +33,7 @@ public:
     void setImageChanged(bool imageChanged);
-    void doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face);
+    void doesItExistEmployee(const QString &sn,const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face);
     void initSourceImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence);
......
@@ -354,11 +354,6 @@ vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &h
         resp->code=map["code"].toInt();
         resp->msg=map["message"].toString();
     }else{
-        qInfo()<<"httpPostFacePopulation===>";
-        qInfo()<<m_httpClient.errorCode();
-        qInfo()<<m_httpClient.errorString();
-        qInfo()<<"httpPostFacePopulation===>end";
         resp->code=2;
         resp->msg=m_httpClient.errorCode();
     }
@@ -511,7 +506,6 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
     QJsonObject faceConfigObj = dataObj["faceConfig"].toObject();
     config.faceConfig.isOn = faceConfigObj["isOn"].toBool();
     config.faceConfig.faceNumbers = faceConfigObj["faceNumbers"].toInt();
-    config.faceConfig.faceFrequency = faceConfigObj["faceFrequency"].toInt();
     config.faceConfig.confidence = faceConfigObj["confidence"].toVariant().toFloat();
     config.faceConfig.updateAt = faceConfigObj["updateAt"].toVariant().toULongLong();
     config.faceConfig.faceLen=faceConfigObj["faceLen"].toInt();
@@ -542,6 +536,7 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
     QJsonObject humanConfigObj = dataObj["humanConfig"].toObject();
     config.humanConfig.isOn=humanConfigObj["isOn"].toBool();
     config.humanConfig.updateAt = humanConfigObj["updateAt"].toVariant().toULongLong();
+    config.humanConfig.faceFrequency = humanConfigObj["faceFrequency"].toVariant().toUInt();
     // 解析 devicesConfig
     QJsonObject devicesConfigObj = dataObj["camera"].toObject();
......
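
Net effect of the two httpDeviceConfig hunks: faceFrequency is no longer read from faceConfig but from humanConfig. A minimal, self-contained sketch of that parse follows; the field names come from the diff, while the payload shape and literal values are assumptions for illustration only.

// Assumed payload shape for illustration; values are invented, only the field names are from the diff.
#include <QDebug>
#include <QJsonDocument>
#include <QJsonObject>

int main() {
    const QByteArray payload = R"({
        "data": {
            "faceConfig":  { "isOn": true, "faceNumbers": 3, "confidence": 0.8 },
            "humanConfig": { "isOn": true, "updateAt": 1714000000, "faceFrequency": 10 }
        }
    })";
    const QJsonObject dataObj = QJsonDocument::fromJson(payload).object()["data"].toObject();
    // faceFrequency now lives under humanConfig, mirroring the hunk above.
    const uint faceFrequency = dataObj["humanConfig"].toObject()["faceFrequency"].toVariant().toUInt();
    qInfo() << "faceFrequency =" << faceFrequency;   // prints 10
    return 0;
}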
@@ -77,7 +77,7 @@ void HumanDetection::setHuManParameter(int &uniformColor){
 }
 //0 人形 1 车形 2 工服
-int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int>&reMap, float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate) {
+int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int>&reMap,QString &sSn, float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate) {
     thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
     TCV_CameraStream *stream = TCV_CreateCameraStream();
@@ -100,8 +100,7 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
     if (res == 0x00 || res == 0x02) {
         num = TCV_HumanDetectorGetNumOfHuman(detector);
-        qInfo()<<"TCV_HumanDetectorGetNumOfHuman==>"<<num;
+        qInfo() << QString("SN(%1): 获取人体数量:%2").arg(sSn).arg(num);
         if (num == 0) return num; // 无行人检测结果,提前返回
         std::vector<TCV_ObjectLocation> results(num);
         TCV_HumanDetectorGetHumanLocation(detector, results.data(), num);
@@ -133,8 +132,6 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
         }
         reMap[0x02] = count_no_uniform; // 未穿工服的行人数量
         reMap[0x00] = count_all; // 所有满足条件的行人数量
-        qInfo()<<"count_all==>"<<count_all;
-        qInfo()<<"count_no_uniform==>"<<count_no_uniform;
         num = (res == 0x00) ? count_all : count_no_uniform;
     }
......
@@ -16,7 +16,7 @@ public:
                    float carShapeConfidence);
     ~HumanDetection();
-    int findHuManCar(const cv::Mat &source,int res,std::map<int,int>&reMap,
+    int findHuManCar(const cv::Mat &source,int res,std::map<int,int>&reMap,QString &sSn,
                      float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate);
     void setHuManParameter(int &uniformColor);
......
@@ -82,7 +82,6 @@ void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const
         } else {
             type = types[results.plates[i].type];
         }
-        qInfo()<<QString("车牌号:%1").arg(results.plates[i].code);
         QString plateResult = QString("第%1个,%2,车牌号:%3,置信度:%4,左上角点x坐标:%5,左上角点y坐标:%6,右下角点x坐标:%7,右下角点y坐标:%8")
                 .arg(i + 1).arg(type.c_str()).arg(results.plates[i].code)
                 .arg(results.plates[i].text_confidence).arg(results.plates[i].x1, 0, 'Q', 4)
@@ -168,7 +167,6 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
         newPlate.new_color=QString::fromStdString(type);
         QString car_nuber=QString::fromUtf8(results.plates[i].code);
         replaceWith1And0(car_nuber);
-        qInfo()<<"I O (i o)大小写替换为 1 0结果:==>"<<car_nuber;
         newPlate.new_plate=car_nuber;
         newPlate.text_confidence=results.plates[i].text_confidence;
         vides_data::ParkingArea area;
......
@@ -12,7 +12,7 @@
 #include <QTextStream>
 #include <QTextCodec>
-const int g_logLimitSize = 5;
+const int g_logLimitSize = 45;
 struct LogHandlerPrivate {
     LogHandlerPrivate();
......
@@ -25,7 +25,7 @@ LogHandlerPrivate::LogHandlerPrivate() {
     openAndBackupLogFile();
     // 十分钟检查一次日志文件创建时间
-    renameLogFileTimer.setInterval(1000*2); // TODO: 可从配置文件读取
+    renameLogFileTimer.setInterval(1000 * 60 * 10); // TODO: 可从配置文件读取
     renameLogFileTimer.start();
     QObject::connect(&renameLogFileTimer, &QTimer::timeout, [this] {
         QMutexLocker locker(&LogHandlerPrivate::logMutex);
@@ -91,9 +91,16 @@ void LogHandlerPrivate::openAndBackupLogFile() {
         logFile->close();
         delete logOut;
         delete logFile;
-        QString newLogPath = logDir.absoluteFilePath(logFileCreatedDate.toString("yyyy-MM-dd.log"));
-        QFile::rename(logPath, newLogPath); // 将原始日志文件重命名为新的文件名
+        QDate renameDate = logFileCreatedDate;
+        if (logFileCreatedDate == QDate::currentDate()) {
+            renameDate = QDate::currentDate().addDays(-1); // 设置为昨天的日期
+        }
+        QString newLogPath = logDir.absoluteFilePath(renameDate.toString("yyyy-MM-dd.log"));
+        // QFile::rename(logPath, newLogPath); // 将原始日志文件重命名为新的文件名
+        QFile::copy(logPath, newLogPath);
+        QFile::remove(logPath);
         logFile = new QFile(logPath);
         logOut = (logFile->open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Append)) ? new QTextStream(logFile) : nullptr;
@@ -105,8 +112,8 @@ void LogHandlerPrivate::openAndBackupLogFile() {
 // 检测当前日志文件大小
 void LogHandlerPrivate::checkLogFiles() {
-    // 如果 protocal.log 文件大小超过5M,重新创建一个日志文件,原文件存档为yyyy-MM-dd_hhmmss.log
-    if (logFile->size() > 1024*g_logLimitSize) {
+    // 如果 protocal.log 文件大小超过45M,重新创建一个日志文件,原文件存档为yyyy-MM-dd_hhmmss.log
+    if (logFile->size() > 1024*1024*g_logLimitSize) {
         logFile->flush();
         logFile->close();
         delete logOut;
@@ -114,7 +121,11 @@ void LogHandlerPrivate::checkLogFiles() {
     QString logPath = logDir.absoluteFilePath("today.log"); // 日志的路径
     QString newLogPath = logDir.absoluteFilePath(logFileCreatedDate.toString("yyyy-MM-dd.log"));
-    QFile::rename(logPath, newLogPath);
+    // QFile::rename(logPath, newLogPath);
+    QFile::copy(logPath, newLogPath);
+    QFile::remove(logPath);
     logFile = new QFile(logPath);
     logOut = (logFile->open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Append)) ? new QTextStream(logFile) : NULL;
     logFileCreatedDate = QDate::currentDate();
......
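
The log-handler hunks change three things: the rotation timer now actually fires every ten minutes, the size limit becomes 45 MB (1024*1024*45 bytes), and archiving is done as copy-then-remove instead of QFile::rename. The standalone sketch below illustrates only that archive step under the same today.log name and limit; it is not the LogHandler class, and the handler itself additionally keys the archive name off the log file's creation date.

// Sketch of the copy-then-remove archive step, assuming the names and limit shown above.
#include <QDate>
#include <QDir>
#include <QFile>

static const qint64 kLogLimitBytes = 1024LL * 1024 * 45;   // mirrors g_logLimitSize = 45 (MB)

void archiveIfTooLarge(const QDir &logDir) {
    const QString logPath = logDir.absoluteFilePath("today.log");
    if (QFile(logPath).size() <= kLogLimitBytes)
        return;
    const QString newLogPath =
        logDir.absoluteFilePath(QDate::currentDate().toString("yyyy-MM-dd.log"));
    QFile::copy(logPath, newLogPath);   // archive a copy first ...
    QFile::remove(logPath);             // ... then drop the original so a fresh today.log is created
}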
@@ -230,7 +230,7 @@ int MediaFaceImage::ToFile(const char* pFileName, const void* pData, int nLength
 //    return pInOutBufferSize; // pOutBuffer由智能指针管理,此处无需手动释放
 //}
-int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat &image) {
+int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat &image,QString &sSn) {
     const int BufferSize = 1024 * 1024 * 2; // 缓冲区大小
     image.release(); // 释放之前的图像
     std::unique_ptr<unsigned char[]> pOutBuffer(new unsigned char[BufferSize]); // 智能指针管理内存
@@ -238,7 +238,7 @@ int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat
     int ret = XSDK_DevSnapSyn(hMedia, nChannel, "", pOutBuffer.get(), &pInOutBufferSize);
     if (ret < 0 || pInOutBufferSize <= 0) {
-        qInfo() << "同步设备端抓图失败";
+        qInfo() <<sSn <<":同步设备端抓图失败";
         return -1;
     }
@@ -249,12 +249,12 @@ int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat
     try {
         cv::Mat decodedImage = cv::imdecode(buffer, cv::IMREAD_UNCHANGED);
         if (decodedImage.empty()) {
-            qInfo() << "图像解码失败";
+            qInfo() << sSn<<":图像解码失败";
             return -1;
         }
         image = std::move(decodedImage);
     } catch (const cv::Exception& e) {
-        qInfo() << "图像解码过程中捕获异常:" << e.what();
+        qInfo() << sSn<<":图像解码过程中捕获异常:" << e.what();
         return -1;
     }
......
@@ -18,7 +18,7 @@ public:
     static MediaFaceImage* getInstance(); // 单例模式获取实例的静态成员函数
     void ParserImageData(const unsigned char* pData, int nDataLen, char* pJpg, int* nJpgLen, char* pJson);
-    int FaceImageCallBack(XSDK_HANDLE hMedia,int nChannel,cv::Mat &image);
+    int FaceImageCallBack(XSDK_HANDLE hMedia,int nChannel,cv::Mat &image,QString &sSn);
     int CameraImage(XSDK_HANDLE hMedia,int nChannel,std::vector<uchar> &buffer);
     int ToFile(const char* pFileName, const void* pData, int nLenght);
......
@@ -10,7 +10,6 @@ ParkingSpaceInfo::ParkingSpaceInfo(){
 }
 ParkingSpaceInfo::~ParkingSpaceInfo(){
-    qInfo() << "ParkingSpaceInfo:关闭";
 }
 void ParkingSpaceInfo::addQueue(RecognizedInfo &info){
     QMutexLocker locker(&queueMutex);
......
@@ -250,7 +250,7 @@ struct Camera {
 struct FaceConfig {
     bool isOn;
     int faceNumbers;
-    uint64 faceFrequency;
     float confidence;
     int faceLen;
     quint64 updateAt;
@@ -281,7 +280,7 @@ struct UniformConfig {
 };
 struct HumanConfig{
     bool isOn;
-    int humanDetectionLen;
+    uint64 faceFrequency;
     quint64 updateAt;
 };
......
@@ -13,7 +13,8 @@ TEMPLATE = app
 # deprecated API in order to know how to port your code away from it.
 DEFINES += QT_DEPRECATED_WARNINGS
 DEFINES += APP_VERSION=\\\"1.3.2\\\"
+DEFINES += QT_MESSAGELOGCONTEXT
+DEFINES += QT_NO_DEBUG_OUTPUT
 QMAKE_LIBDIR += /usr/local/lib
 INCLUDEPATH+=/usr/local/include/opencv4
@@ -24,6 +25,9 @@ INCLUDEPATH+=/usr/local/include/human
 INCLUDEPATH+=/usr/local/include/CImg
 INCLUDEPATH+=/usr/local/include/mqtt
 # 禁用所有警告
 QMAKE_CXXFLAGS += -w
......
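
The two new DEFINES tie into the qInfo/qDebug split running through this commit: QT_NO_DEBUG_OUTPUT turns qDebug() statements into no-ops, while QT_MESSAGELOGCONTEXT keeps file/line/function context available to a message handler in release builds. Below is a minimal sketch, not taken from this codebase, of a handler that prints that context; the handler name is ours.

// Sketch only: a custom handler that uses the context kept by QT_MESSAGELOGCONTEXT.
// With QT_NO_DEBUG_OUTPUT defined (as in the .pro above), qDebug() becomes a no-op,
// so only qInfo()/qWarning()/qCritical() messages reach the handler.
#include <QCoreApplication>
#include <QDebug>
#include <cstdio>

static void contextAwareHandler(QtMsgType type, const QMessageLogContext &ctx, const QString &msg) {
    const char *file = ctx.file ? ctx.file : "?";
    std::fprintf(stderr, "[%d] %s:%d %s\n", static_cast<int>(type), file, ctx.line, qPrintable(msg));
}

int main(int argc, char *argv[]) {
    QCoreApplication app(argc, argv);
    qInstallMessageHandler(contextAwareHandler);
    qInfo() << "kept in release builds";
    qDebug() << "suppressed when QT_NO_DEBUG_OUTPUT is defined";
    return 0;
}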
@@ -557,7 +557,8 @@ void MainWindow::startCamera(const QString &httpurl){
             reStatus.camera_info_list.push_front(camera_info);
             __uint8_t new_algorithm= intToUint8t(devConfig.faceConfig.isOn,devConfig.licensePlateConfig.isOn,devConfig.uniformConfig.isOn,devConfig.humanConfig.isOn);
+            uint64 face_frequency=devConfig.humanConfig.faceFrequency;
+            offlineCameraHandle->initFaceFrequency(face_frequency);
             offlineCameraHandle->cameraParameterUpdate(devConfig);
             offlineCameraHandle->initAlgorithmPermissions(new_algorithm);
             if(!offlineCameraHandle->compareLists(device.areas)){
@@ -724,7 +725,6 @@ void MainWindow::initEncodeToString(QString &enCodeJson) {
 }
 bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &workSpWMn,QString &sn){
-    qInfo()<<"iniWorkSpVMn=="<<sn;
     QString jsonfile=QCoreApplication::applicationDirPath()+"/camera_config.json";
     bool isEqual=true;
@@ -975,8 +975,6 @@ __uint8_t MainWindow::intToUint8t(bool faceAlgorithm, bool licensePlateAlgorithm
     // 车牌识别对应最低位(第0位)
     result |= (licensePlateAlgorithm ? 1 : 0);
-    qInfo()<<"算法结果"<<result;
     return result;
 }
 void MainWindow::initCameras(vides_data::cameraParameters &parameter,vides_data::responseConfig &devConfig,const std::list<vides_data::responseArea>&areas,std::list<vides_data::requestCameraInfo>&camera_info_list){
@@ -988,9 +986,10 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,vides_data:
     CameraHandle * cameraHandle =new CameraHandle(parameter.sDevId,parameter.httpUrl,parameter.sSn,parameter.channel,image_save,heightReference,devConfig);
     int sdk_handle=cameraHandle->sdkDevLoginSyn(parameter.sDevId,parameter.nDevPort,parameter.sUserName,parameter.sPassword,3000);
-    qInfo()<<"句柄为2:"<<sdk_handle;
+    qDebug() << QString("SN(%1): 句柄为%2").arg(parameter.sSn).arg(sdk_handle);
     if(sdk_handle<=0){
-        qInfo() << "登录失败";
+        qInfo() << QString("SN(%1): 登录失败").arg(parameter.sSn);
         return ;
     }
     mediaFaceImage->setMap(sdk_handle,cameraHandle);
@@ -1002,7 +1001,7 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,vides_data:
     int synTime=devConfig.camera.devSnapSynTimer;
-    uint64 face_frequency=devConfig.faceConfig.faceFrequency;
+    uint64 face_frequency=devConfig.humanConfig.faceFrequency;
     float carConfidenceMax=devConfig.licensePlateConfig.carConfidenceMax;
     float carConfidenceMin=devConfig.licensePlateConfig.carConfidenceMin;
......