Commit 1542f525 by “liusq”

修改相关日志打印格式

parent c19e68b8
......@@ -153,7 +153,7 @@ void* AlgorithmTaskManage::schedulingAlgorithm(int scheType) {
} else if (scheType == 0x03) {
return schedulingAlgorithmTemplate(faceReconitionHandles, mtxFace);
} else {
qInfo() << "参数错误";
qDebug() << "参数错误";
return nullptr;
}
}
......@@ -170,10 +170,10 @@ void AlgorithmTaskManage::executeFindDoesItExistEmployee(const cv::Mat &source,s
if (selectedFaceReconition!=nullptr && !selectedFaceReconition->getImageChanged()) {
selectedFaceReconition->setIsRunning(true);
// 调用选定对象的doesItExistEmployee函数
qInfo() << "人脸识别算法抢到===>sn"<<sSn<<selectedFaceReconition;
selectedFaceReconition->doesItExistEmployee(source, faces);
qDebug() << "人脸识别算法抢到===>sn"<<sSn<<selectedFaceReconition;
selectedFaceReconition->doesItExistEmployee(sSn,source, faces);
} else {
qInfo() << "没有可用的selectedFaceReconition对象可以调度";
qDebug() << "没有可用的selectedFaceReconition对象可以调度";
return ;
}
}
......@@ -192,10 +192,10 @@ void AlgorithmTaskManage::executeFindlicensePlateNumber(const cv::Mat &source, Q
if (selectedLicensePlate!=nullptr) {
selectedLicensePlate->setIsRunning(true);
// 调用选定对象的findHuManCar函数
qInfo() << "车牌调度算法抢到===>sn"<<sSn<<selectedLicensePlate;
qDebug() << "车牌调度算法抢到===>sn"<<sSn<<selectedLicensePlate;
selectedLicensePlate->licensePlateNumber(source, lpNumber,plate, currentTime);
} else {
qInfo() << "没有可用的selectedLicensePlate对象可以调度";
qDebug() << "没有可用的selectedLicensePlate对象可以调度";
return ;
}
}
......@@ -214,11 +214,11 @@ int AlgorithmTaskManage::executeFindHuManCar(const cv::Mat &source, int res,
selectedHumanDetection->setIsRunning(true);
// 调用选定对象的findHuManCar函数
qInfo() << "人形调度算法抢到===>sn"<<sSn<<"res"<<selectedHumanDetection;
int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, heightReference,currentPlate);
qDebug() << "人形调度算法抢到===>sn"<<sSn<<"res"<<selectedHumanDetection;
int detectionResult = selectedHumanDetection->findHuManCar(source, res,resMap, sSn,heightReference,currentPlate);
return detectionResult;
} else {
qInfo() << "没有可用的HumanDetection对象可以调度";
qDebug() << "没有可用的HumanDetection对象可以调度";
return -2;
}
}
......@@ -74,7 +74,7 @@ int CameraHandle::sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName
loginParam->nCnnType=EDEV_CNN_TYPE_AUTO;
int loginResult =XSDK_DevLoginSyn(loginParam,nTimeout);
if(loginResult<0){
qInfo() << "登录设备失败";
qInfo() << QString("SN(%1): 登录设备失败").arg(sSn);;
return loginResult;
}
this->hDevice=loginResult;
......@@ -270,15 +270,16 @@ void CameraHandle::cameraParameterUpdate(vides_data::responseConfig &cloudConfig
}
}
if(this->face_frequency!=cloudConfig.faceConfig.faceFrequency){
this->face_frequency=cloudConfig.faceConfig.faceFrequency;
}
float carConfidenceMax=cloudConfig.licensePlateConfig.carConfidenceMax;
float carConfidenceMin=cloudConfig.licensePlateConfig.carConfidenceMin;
this->setCarConfidenceMaxAndMin(carConfidenceMax,carConfidenceMin);
}
void CameraHandle::initFaceFrequency(uint64 face_frequency){
this->face_frequency=face_frequency;
}
void CameraHandle::initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer,uint64 face_frequency) {
connect(dev_snap_syn_timer, &QTimer::timeout, this, [this,hDevice]() {
......@@ -295,7 +296,7 @@ void CameraHandle::sdkRealTimeDevSnapSyn(int hDevice) {
};
if (threadPool->activeThreadCount() >= threadPool->maxThreadCount()) {
qInfo() << "任务积压,跳过本次执行";
qInfo() << QString("SN(%1): 任务积压,跳过本次执行").arg(sSn);
return;
}
auto taskRunnable = new TaskRunnable(taskSyn, hDevice, this->channel, RunFunction::SdkDevSnapSyn);
......@@ -368,7 +369,7 @@ void CameraHandle::sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
}
int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
if (!semaphore.tryAcquire()) {
qInfo() <<sSn<<"sdkDevSnapSyn:正在执行线程";
qInfo() << QString("SN(%1): sdkDevSnapSyn:正在执行线程").arg(sSn);
return -1;
}
ScopeSemaphoreExit guard([this]() {
......@@ -395,7 +396,7 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
cv::Mat image;
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
qint64 currentTime= QDateTime::currentSecsSinceEpoch();
mediaFaceImage->FaceImageCallBack(hObject,this->channel,image);
mediaFaceImage->FaceImageCallBack(hObject,this->channel,image,sSn);
if (image.empty())
{
......@@ -413,11 +414,11 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
if(hDevice<=0){
qInfo() << "相机断线";
qInfo() << QString("SN(%1): 相机断线").arg(sSn);
return;
}
if (!semaphore.tryAcquire()) {
qInfo() << sSn<<"callbackFunction:正在执行线程";
qInfo() << QString("SN(%1): callbackFunction:正在执行线程").arg(sSn);;
return ;
}
ScopeSemaphoreExit guard([this]() {
......@@ -426,17 +427,14 @@ void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
cv::Mat image;
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
qint64 currentTime= QDateTime::currentSecsSinceEpoch();
int ret=mediaFaceImage->FaceImageCallBack(hDevice,nChannel, image);
int ret=mediaFaceImage->FaceImageCallBack(hDevice,nChannel, image,sSn);
if (ret < 0) {
offlineCount++; // 累加计数器
qInfo() << "offlineCount: " << loginParam->sDevId<<offlineCount;
if (offlineCount >= 3) { // 判断是否连续3次返回0
qInfo() << "设备离线";
qInfo() <<QString("SN(%1): 设备离线").arg(sSn);
QString ip=QString::fromUtf8(loginParam->sDevId);
bool is_ping=vides_data::pingAddress(ip);
qInfo() << "ping 的结果"<<is_ping;
qDebug() << sSn<<":ping 的结果"<<is_ping;
if(is_ping){
deviceReboot();
......@@ -457,11 +455,12 @@ void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
}
if (image.empty())
{
qInfo() << "Failed to read the image";
qInfo()<<QString("SN(%1): Failed to read the image").arg(sSn);
return ;
}
if (image.rows <= 0 || image.cols <= 0 || image.channels() <= 0) {
qInfo() << "图像尺寸或通道数不正确,需排查原因";
qInfo()<<QString("SN(%1): 图像尺寸或通道数不正确,需排查原因").arg(sSn);
return ;
}
updateImage(image, currentTime);
......@@ -483,8 +482,9 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
}
}
qInfo() << "最新车牌" << newInfo.getLicensePlate() << "区域当前车牌" << park->getCurrentPlate().getLicensePlate();
qInfo() << "不同的区域:" << park->getSpaceIndex() << ",数量:" << count;
qInfo() << QString("SN(%1): 最新车牌%2,当前区域车牌:%3").arg(sSn).arg(newInfo.getLicensePlate()).arg(park->getCurrentPlate().getLicensePlate());
qInfo() << QString("SN(%1): 区域:%2,数量:%3").arg(sSn).arg(park->getSpaceIndex()).arg(count);
if (count>= 3) {
//第一次进场 当前车牌就是进来这个,老车牌就是空
if(park->getCurrentPlate().getLicensePlate().length()<=0){
......@@ -499,17 +499,17 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
std::map<int,int>resMap;
int car_size =algorithmTaskManage.executeFindHuManCar(frame,0x01,currentPlates,resMap,sSn,heightReference);
qInfo()<<sSn<<":"<<"当前车形数量:"<<car_size;
qInfo()<<QString("SN(%1): 当前车形数量:%2").arg(sSn).arg(car_size);
if (car_size <= 0 && car_size!=-2) {
qInfo() << sSn<<"区域:"<<park->getSpaceIndex() << ": 出场:";
qInfo()<<QString("SN(%1): 区域:%2出场").arg(sSn).arg(park->getSpaceIndex());
//如果有车辆检测到并且不在停车区域内部,视为出场
park->setCurrentPlate(newInfo);
result = Exit;
}else {
// 没有车辆或车辆在停车区域内部,移除队列
park->removeNoQueue();
qInfo() << sSn << ": no出场:" << car_size;
qDebug()<<QString("SN(%1): no出场::%2").arg(sSn).arg(car_size);
}
}else{
//当前不为空,新车,新车入场,老车出场
......@@ -601,8 +601,8 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
int faSize =0;
std::vector<vides_data::ParkingArea> currentPlates;
int uniforms=0x00;
qInfo()<<"updateImage"<<algorithmPermissions;
std::map<int,int>resMap;
qInfo()<<QString("SN(%1): 相机算法:%2").arg(sSn).arg(algorithmPermissions);
if ((algorithmPermissions & 0x01<<3) != 0) {
//穿工服算法
if ((algorithmPermissions & 0x01<<2) != 0) {
......@@ -620,7 +620,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
}
}
if(uniforms==-2 || faSize==-2){
qInfo() << "没有可用的HumanDetection对象可以调度";
qInfo()<<QString("SN(%1): 没有可用的HumanDetection对象可以调度").arg(sSn);
return ;
}
QPoint point_info(faSize,uniforms);
......@@ -630,7 +630,6 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
if ((algorithmPermissions & 0x01<<2) != 0) {
worker = (faSize - uniforms > 0) ? (faSize - uniforms) : 0;
}
qInfo()<<"工作人数==>"<<worker;
vides_data::response* resp=httpService.httpPostFacePopulation(imgs,faSize,worker,sSn,currentTime);
if (resp->code!= 0) {
......@@ -639,10 +638,17 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
httpService.setHttpUrl(httpUrl);
vides_data::response* img_resp = httpService.httpPostFacePopulation(num_imgs,faSize,worker,sSn,currentTime);
if (img_resp->code != 0) {
qInfo()<<"不加图片人数变化推送信息推送失败";
qInfo()<<QString("SN(%1):不加图片人数变化推送信息推送失败,失败信息%2").arg(sSn).arg(img_resp->msg);
}else {
qInfo()<<QString("SN(%1): 不加图片人数变化推送信息推送成功").arg(sSn);
}
instace.deleteObj(img_resp);
}else{
qInfo()<<QString("SN(%1): 人数变化推送失败,失败信息%2").arg(sSn).arg(resp->msg);
}
}else {
qInfo()<<QString("SN(%1): 人数变化推送成功").arg(sSn);
}
instace.deleteObj(resp);
......@@ -680,13 +686,16 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
httpService.setHttpUrl(httpUrl);
vides_data::response* img_resp = httpService.httpPostFaceReconition(faceReconition);
if (img_resp->code != 0) {
qInfo()<<"不加图片人脸信息推送失败";
qInfo()<<"不加图片识别人code"<<resp->code;
qInfo()<<"不加图片识别人msg"<<resp->msg;
qInfo()<<"不加图片识别人脸信息推送失败"<<face.id;
qInfo()<<QString("SN(%1): 不加图片人脸信息推送失败,失败信息%2,人脸id%3").arg(sSn).arg(resp->msg).arg(face.id);
}else {
qInfo()<<QString("SN(%1): 不加图片人脸信息推送成功").arg(sSn);
}
instace.deleteObj(img_resp);
}else {
qInfo()<<QString("SN(%1): 人脸信息推送失败,失败信息%2").arg(sSn).arg(resp->msg);
}
}else{
qInfo()<<QString("SN(%1): 人脸信息推送成功").arg(sSn);
}
instace.deleteObj(resp);
}
......@@ -707,18 +716,26 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
httpService.setHttpUrl(httpUrl);
vides_data::response* img_resp=httpService.httpPostUniforms(num_imgs,strUniform, sSn,currentTime);
if (img_resp->code != 0) {
qInfo()<<"不加图片推送未穿工服人数失败";
qInfo()<<QString("SN(%1): 不加图片推送未穿工服人数失败,失败信息%2").arg(sSn).arg(img_resp->msg);
}else {
qInfo()<<QString("SN(%1): 不加图片推送未穿工服人数成功").arg(sSn);
}
instace.deleteObj(img_resp);
}else{
qInfo()<<QString("SN(%1): 推送未穿工服人数失败,失败信息%2").arg(sSn).arg(resp->msg);
}
}else{
qInfo()<<QString("SN(%1): 推送未穿工服人数成功").arg(sSn);
}
instace.deleteObj(resp);
}
}
}
}else{
qInfo()<<"人形识别算法关闭,只有车牌算法了";
qDebug()<<sSn<<":人形识别算法关闭,只有车牌算法了";
}
//关闭车牌识别
if ((algorithmPermissions & 0x01) == 0) {
......@@ -784,7 +801,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
if (parkAreaMap != parkMap.end()) {
value = parkAreaMap->second; // 成功找到,获取
} else {
qInfo()<<sSn<<"==>区域不存在:"<<key;
qInfo()<<QString("SN(%1): 区域%2不存在").arg(sSn).arg(key);
continue;
}
vides_data::requestLicensePlate resultPlate;
......@@ -793,7 +810,7 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
algorithmTaskManage.executeFindlicensePlateNumber(areaMat, lpNumber,resultPlate,currentTime,sSn);
std::list<vides_data::LicensePlate>ps =resultPlate.plates;
qInfo()<<QString("sn==>%1,区域:%2识别的车牌信息是:%3").arg(sSn).arg(key).
qInfo()<<QString("SN(%1):区域:%2识别的车牌信息是:%3").arg(sSn).arg(key).
arg(lpNumber);
if(ps.size()==0){
int res=-1;
......@@ -828,13 +845,13 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
RecognizedInfo recognizedInfo;
if (maxPlate.new_color=="蓝牌" && maxPlate.new_plate.length() != 7) {
qInfo()<<sSn<<"==>蓝牌车牌号:"<<maxPlate.new_plate<<"===>recognition.new_plate.length():"<<maxPlate.new_plate.length();
qDebug()<<QString("SN(%1): 蓝牌车牌号%2,recognition.new_plate.length()=%3").arg(sSn).arg(maxPlate.new_plate).arg(maxPlate.new_plate.length());
continue;
} else if (maxPlate.new_color=="绿牌新能源" && maxPlate.new_plate.length() != 8) {
qInfo()<<sSn<<"==>绿牌车牌号:"<<maxPlate.new_plate<<"===>recognition.new_plate.length():"<<maxPlate.new_plate.length();
qDebug()<<QString("SN(%1): 绿牌车牌号%2,recognition.new_plate.length()=%3").arg(sSn).arg(maxPlate.new_plate).arg(maxPlate.new_plate.length());
continue;
} else if (maxPlate.new_plate.length() < 7) {
qInfo()<<sSn<<"==>非绿牌蓝牌车牌号:"<<maxPlate.new_plate<<"===>recognition.new_plate.length():"<<maxPlate.new_plate.length();
qDebug()<<QString("SN(%1): 非绿牌蓝牌车牌号%2,recognition.new_plate.length()=%3").arg(sSn).arg(maxPlate.new_plate).arg(maxPlate.new_plate.length());
continue;
}
if(maxPlate.text_confidence>=carConfidenceMax){
......@@ -851,7 +868,8 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
this->checkAndUpdateCurrentPlate(value,areaMat,recognizedInfo,res);
}
if(maxPlate.text_confidence<=carConfidenceMin){
qInfo()<<sSn<<"==>recognition.text_confidence<=instace.getCarConfidenceMin"<<carConfidenceMin;
qDebug()<<QString("SN(%1): recognition.text_confidence<=instace.getCarConfidenceMin:%2").arg(sSn).arg(carConfidenceMin);
continue;
}
if(maxPlate.text_confidence>carConfidenceMin
......@@ -870,7 +888,6 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
maxPlate.img=imgs;
maxPlate.new_color=recognizedInfo.getColor();
newPlate.plates.push_back(std::move(maxPlate));
qInfo()<<QString("当前进入ps.size()>0 --> res == Exit || res == Mobilization 是当前校验返回结果是:%1").arg(res);
}
if(res==ExitAndMobilization){
maxPlate.areaLocation=value->getArea();
......@@ -883,7 +900,8 @@ void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
}
if(newPlate.plates.size()>0){
foreach (auto var, newPlate.plates) {
qInfo()<<QString("sn:%1 =>识别的车牌号是:%2").arg(sSn).arg(var.new_plate);
qInfo()<<QString("SN(%1): 识别的车牌号是:%2").arg(sSn).arg(var.new_plate);
}
licensePlateRecognitionResults(newPlate);
}
......@@ -942,11 +960,13 @@ void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlat
httpService.setHttpUrl(httpUrl);
vides_data::response* img_resp = httpService.httpLicensePlateRecognition(location, result);
if (img_resp->code != 0) {
qInfo()<<"车牌不加图片上传失败";
qInfo()<<QString("SN(%1): 车牌不加图片上传失败:失败信息%2").arg(sSn).arg(img_resp->msg);
}else{
qInfo()<<QString("SN(%1): 车牌不加图片上传成功").arg(sSn);
}
instace.deleteObj(img_resp);
}else{
qInfo()<<"licensePlateRecognitionResults:车牌识别结果失败";
qInfo()<<QString("SN(%1): 车牌识别结果上传失败:失败信息%2").arg(sSn).arg(resp->msg);
// 在达到最大重试次数且仍然没有成功的情况下执行相应的处理逻辑
}
......@@ -988,7 +1008,7 @@ void CameraHandle::sdkWifi(QString &pwd,QString &ssid){
int nLen = sizeof(szOutBuffer);
int res =XSDK_DevSetSysConfigSyn(hDevice, JK_NetWork_Wifi, wipCfg, strlen(wipCfg), szOutBuffer, &nLen, 3000, EXCMD_CONFIG_SET);
if(res<0){
qInfo() << "修改wifi失败";
qInfo()<<QString("SN(%1): 修改wifi失败").arg(sSn);
}
deviceReboot();
......@@ -1003,23 +1023,23 @@ void CameraHandle::sdkDevSystemTimeZoneSyn(QString &time){
int res = XSDK_DevSetSysConfigSyn(hDevice, JK_System_TimeZone, zoneCfg, strlen(zoneCfg), outBuffer, &nInOutBufSize, 3000, EXCMD_CONFIG_GET);
if(res<0){
qInfo() << "FirstUserTimeZone:修改失败";
qInfo()<<QString("SN(%1): FirstUserTimeZone 修改失败").arg(sSn);
}
res=XSDK_DevSynTime(hDevice,cTime,0);
if(res<0){
qInfo() << "sdkDevSystemTimeZoneSyn:修改失败";
qInfo()<<QString("SN(%1): sdkDevSystemTimeZoneSyn修改失败").arg(sSn);
}
}
//录像设置
void CameraHandle::sdkRecordCfg(const char * recordJson){
qInfo()<<recordJson;
char szOutBuffer[512] = { 0 };
int nLen = sizeof(szOutBuffer);;
int res=XSDK_DevSetSysConfigSyn(hDevice,JK_Record,recordJson,strlen(recordJson),szOutBuffer,&nLen,3000,EXCMD_CONFIG_SET);
if(res<0){
qInfo() << "sdkRecordCfg 录像设置->修改失败"<<res;
qInfo()<<QString("SN(%1): sdkRecordCfg 录像设置->修改失败").arg(sSn);
}
}
//配置编码设置
......@@ -1028,7 +1048,8 @@ void CameraHandle::sdkEncodeCfg(const char* pCfg){
int nLen = sizeof(szOutBuffer);
int res=XSDK_DevSetSysConfigSyn(hDevice,JK_Simplify_Encode,pCfg,strlen(pCfg),szOutBuffer,&nLen,3000,EXCMD_CONFIG_SET);
if(res<0){
qInfo() << "sdkEncodeCfg 配置编码设置->修改失败"<<res;
qInfo()<<QString("SN(%1): sdkEncodeCfg 配置编码设置->修改失败").arg(sSn);
}
}
......@@ -1037,7 +1058,8 @@ int CameraHandle::updateSdkDevStatus(bool status){
int nInOutSize = sizeof(szOutBuffer);
int res= XSDK_DevGetSysConfigSyn(hDevice,JK_NetWork_SPVMN,szOutBuffer,&nInOutSize,4000,EXCMD_CONFIG_GET);
if(res<0){
qInfo() << sSn<<"GB28181可远程开关"<<res;
qInfo()<<QString("SN(%1): GB28181可远程开关").arg(sSn);
return 0 ;
}
XSDK_CFG::NetWork_SPVMN config;
......@@ -1130,7 +1152,7 @@ void CameraHandle::sdkDevSpvMn(const char *spvMn){
int nLen = sizeof(szOutBuffer);
int res=XSDK_DevSetSysConfigSyn(hDevice,JK_NetWork_SPVMN,spvMn,strlen(spvMn),szOutBuffer,&nLen,3000,EXCMD_CONFIG_SET);
if(res<0){
qInfo() << "sdkDevSpvMn 28181->修改失败"<<res;
qInfo() <<sSn<< ":sdkDevSpvMn 28181->修改失败"<<res;
}
}
int CameraHandle::deviceReboot(){
......@@ -1141,7 +1163,7 @@ int CameraHandle::deviceReboot(){
const char* pCfg = cfg.ToString();
nRet = XSDK_DevSetSysConfig(hDevice, JK_OPMachine, pCfg, strlen(pCfg), 1, 3000, EXCMD_SYSMANAGER_REQ);
if(nRet<0){
qInfo() << sSn<<"重启相机失败"<<nRet;
qInfo() << sSn<<":重启相机失败"<<nRet;
return 0 ;
}
QString ip=QString::fromUtf8(loginParam->sDevId);
......@@ -1159,7 +1181,7 @@ int CameraHandle::deviceShutdown()
const char* pCfg = cfg.ToString();
nRet = XSDK_DevSetSysConfig(hDevice, JK_OPMachine, pCfg, strlen(pCfg), 1, 3000, EXCMD_SYSMANAGER_REQ);
if(nRet<0){
qInfo() << sSn<<"设备关机失败"<<nRet;
qInfo()<<QString("SN(%1): 设备关机失败:%2").arg(sSn).arg(nRet);
return 0;
}
......
......@@ -127,6 +127,10 @@ public:
void setMediaHandle(int mediaHandle);
void initAlgorithmPermissions(__uint8_t algorithm);
void initFaceFrequency(uint64 face_frequency);
void initParkingSpaceInfo(const std::list<vides_data::responseArea>&areas);
bool compareLists(const std::list<vides_data::responseArea>& newAreas);
......
......@@ -192,7 +192,7 @@ cv::Mat FaceReconitionHandle::loadImageFromByteStream(const QString& filePath) {
void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
void FaceReconitionHandle::doesItExistEmployee(const QString &sn,const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
ScopeSemaphoreExit streamGuard([this]() {
isRunning.store(false, std::memory_order_release);
......@@ -211,14 +211,14 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
if (ret != HSUCCEED) {
qInfo()<<QString("image handle error:%1").arg((long) imageSteamHandle,0,10);
qInfo()<<QString("SN(%1): image handle error:%2").arg(sn).arg((long)imageSteamHandle,0,10);
return ;
}
HF_MultipleFaceData multipleFaceData = {0};
HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
if (multipleFaceData.detectedNum <= 0) {
qInfo()<<QString("search 未检测到人脸");
qInfo()<<QString("SN(%1): 未检测到人脸").arg(sn);
return ;
}
......@@ -227,11 +227,12 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
HInt32 featureNum;
HF_GetFeatureLength(ctxHandle, &featureNum);
for(int j=0;j< multipleFaceData.detectedNum; ++j){
qInfo()<<QString("doesItExistEmployee==>面部索引: %1").arg(j);
//qInfo()<<QString("doesItExistEmployee==>面部索引: %1").arg(j);
qDebug() << QString("SN(%1): 面部索引:%2").arg(sn).arg(j);
std::vector<float> newfeature(featureNum,0.0f);
ret = HF_FaceFeatureExtractCpy(ctxHandle, imageSteamHandle, multipleFaceData.tokens[j], newfeature.data());
if(ret != HSUCCEED) {
qInfo()<<QString("特征提取出错: %1").arg(ret);
qInfo() << QString("SN(%1): 特征提取出错:%2").arg(sn).arg(ret);
HF_ReleaseImageStream(imageSteamHandle);
return ;
}
......@@ -248,13 +249,12 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
HFloat confidence;
ret = HF_FeaturesGroupFeatureSearch(ctxHandle, feature, &confidence, &searchIdentity);
if (ret != HSUCCEED) {
qInfo()<<QString("搜索失败: %1").arg(ret);
qInfo() << QString("SN(%1): 搜索失败:%2").arg(sn).arg(ret);
return ;
}
qInfo()<<QString("搜索置信度: %1").arg(confidence);
qInfo()<<QString("匹配到的tag: %1").arg(searchIdentity.tag);
qInfo()<<QString("匹配到的customId: %1").arg(searchIdentity.customId);
qInfo() << QString("SN(%1): 搜索置信度:%2").arg(sn).arg(confidence);
qInfo() << QString("SN(%1): 匹配到的tag:%2").arg(sn).arg(searchIdentity.tag);
qInfo() << QString("SN(%1): 匹配到的customId:%2").arg(sn).arg(searchIdentity.customId);
// Face Pipeline
//printf("人脸特征数量: %d", faceNum);
if (confidence > configConfidence) {
......@@ -275,7 +275,7 @@ void FaceReconitionHandle::doesItExistEmployee(const cv::Mat &source,std::list<v
// printf("image released");
} else {
//printf("image release error: %ld", ret);
qInfo()<<QString("image release error: %1").arg(ret);
qInfo() << QString("SN(%1): image release error:%2").arg(sn).arg(ret);
}
}
......@@ -33,7 +33,7 @@ public:
void setImageChanged(bool imageChanged);
void doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face);
void doesItExistEmployee(const QString &sn,const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face);
void initSourceImageMap(std::map<QString,QString>&maps,int numberFaces,float confidence);
......
......@@ -354,11 +354,6 @@ vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &h
resp->code=map["code"].toInt();
resp->msg=map["message"].toString();
}else{
qInfo()<<"httpPostFacePopulation===>";
qInfo()<<m_httpClient.errorCode();
qInfo()<<m_httpClient.errorString();
qInfo()<<"httpPostFacePopulation===>end";
resp->code=2;
resp->msg=m_httpClient.errorCode();
}
......@@ -511,7 +506,6 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
QJsonObject faceConfigObj = dataObj["faceConfig"].toObject();
config.faceConfig.isOn = faceConfigObj["isOn"].toBool();
config.faceConfig.faceNumbers = faceConfigObj["faceNumbers"].toInt();
config.faceConfig.faceFrequency = faceConfigObj["faceFrequency"].toInt();
config.faceConfig.confidence = faceConfigObj["confidence"].toVariant().toFloat();
config.faceConfig.updateAt = faceConfigObj["updateAt"].toVariant().toULongLong();
config.faceConfig.faceLen=faceConfigObj["faceLen"].toInt();
......@@ -542,6 +536,7 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
QJsonObject humanConfigObj = dataObj["humanConfig"].toObject();
config.humanConfig.isOn=humanConfigObj["isOn"].toBool();
config.humanConfig.updateAt = humanConfigObj["updateAt"].toVariant().toULongLong();
config.humanConfig.faceFrequency = humanConfigObj["faceFrequency"].toVariant().toUInt();
// 解析 devicesConfig
QJsonObject devicesConfigObj = dataObj["camera"].toObject();
......
......@@ -77,7 +77,7 @@ void HumanDetection::setHuManParameter(int &uniformColor){
}
//0 人形 1 车形 2 工服
int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int>&reMap, float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate) {
int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int>&reMap,QString &sSn, float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate) {
thread_time.store(QDateTime::currentMSecsSinceEpoch(), std::memory_order_release);
TCV_CameraStream *stream = TCV_CreateCameraStream();
......@@ -100,8 +100,7 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
if (res == 0x00 || res == 0x02) {
num = TCV_HumanDetectorGetNumOfHuman(detector);
qInfo()<<"TCV_HumanDetectorGetNumOfHuman==>"<<num;
qInfo() << QString("SN(%1): 获取人体数量:%2").arg(sSn).arg(num);
if (num == 0) return num; // 无行人检测结果,提前返回
std::vector<TCV_ObjectLocation> results(num);
TCV_HumanDetectorGetHumanLocation(detector, results.data(), num);
......@@ -133,8 +132,6 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
}
reMap[0x02] = count_no_uniform; // 未穿工服的行人数量
reMap[0x00] = count_all; // 所有满足条件的行人数量
qInfo()<<"count_all==>"<<count_all;
qInfo()<<"count_no_uniform==>"<<count_no_uniform;
num = (res == 0x00) ? count_all : count_no_uniform;
}
......
......@@ -16,7 +16,7 @@ public:
float carShapeConfidence);
~HumanDetection();
int findHuManCar(const cv::Mat &source,int res,std::map<int,int>&reMap,
int findHuManCar(const cv::Mat &source,int res,std::map<int,int>&reMap,QString &sSn,
float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate);
void setHuManParameter(int &uniformColor);
......
......@@ -82,7 +82,6 @@ void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const
} else {
type = types[results.plates[i].type];
}
qInfo()<<QString("车牌号:%1").arg(results.plates[i].code);
QString plateResult = QString("第%1个,%2,车牌号:%3,置信度:%4,左上角点x坐标:%5,左上角点y坐标:%6,右下角点x坐标:%7,右下角点y坐标:%8")
.arg(i + 1).arg(type.c_str()).arg(results.plates[i].code)
.arg(results.plates[i].text_confidence).arg(results.plates[i].x1, 0, 'Q', 4)
......@@ -168,7 +167,6 @@ void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString
newPlate.new_color=QString::fromStdString(type);
QString car_nuber=QString::fromUtf8(results.plates[i].code);
replaceWith1And0(car_nuber);
qInfo()<<"I O (i o)大小写替换为 1 0结果:==>"<<car_nuber;
newPlate.new_plate=car_nuber;
newPlate.text_confidence=results.plates[i].text_confidence;
vides_data::ParkingArea area;
......
......@@ -12,7 +12,7 @@
#include <QTextStream>
#include <QTextCodec>
const int g_logLimitSize = 5;
const int g_logLimitSize = 45;
struct LogHandlerPrivate {
LogHandlerPrivate();
......
......@@ -25,7 +25,7 @@ LogHandlerPrivate::LogHandlerPrivate() {
openAndBackupLogFile();
// 十分钟检查一次日志文件创建时间
renameLogFileTimer.setInterval(1000*2); // TODO: 可从配置文件读取
renameLogFileTimer.setInterval(1000 * 60 * 10); // TODO: 可从配置文件读取
renameLogFileTimer.start();
QObject::connect(&renameLogFileTimer, &QTimer::timeout, [this] {
QMutexLocker locker(&LogHandlerPrivate::logMutex);
......@@ -91,9 +91,16 @@ void LogHandlerPrivate::openAndBackupLogFile() {
logFile->close();
delete logOut;
delete logFile;
QDate renameDate = logFileCreatedDate;
if (logFileCreatedDate == QDate::currentDate()) {
renameDate = QDate::currentDate().addDays(-1); // 设置为昨天的日期
}
QString newLogPath = logDir.absoluteFilePath(logFileCreatedDate.toString("yyyy-MM-dd.log"));
QFile::rename(logPath, newLogPath); // 将原始日志文件重命名为新的文件名
QString newLogPath = logDir.absoluteFilePath(renameDate.toString("yyyy-MM-dd.log"));
// QFile::rename(logPath, newLogPath); // 将原始日志文件重命名为新的文件名
QFile::copy(logPath, newLogPath);
QFile::remove(logPath);
logFile = new QFile(logPath);
logOut = (logFile->open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Append)) ? new QTextStream(logFile) : nullptr;
......@@ -105,8 +112,8 @@ void LogHandlerPrivate::openAndBackupLogFile() {
// 检测当前日志文件大小
void LogHandlerPrivate::checkLogFiles() {
// 如果 protocal.log 文件大小超过5M,重新创建一个日志文件,原文件存档为yyyy-MM-dd_hhmmss.log
if (logFile->size() > 1024*g_logLimitSize) {
// 如果 protocal.log 文件大小超过55M,重新创建一个日志文件,原文件存档为yyyy-MM-dd_hhmmss.log
if (logFile->size() > 1024*1024*g_logLimitSize) {
logFile->flush();
logFile->close();
delete logOut;
......@@ -114,7 +121,11 @@ void LogHandlerPrivate::checkLogFiles() {
QString logPath = logDir.absoluteFilePath("today.log"); // 日志的路径
QString newLogPath = logDir.absoluteFilePath(logFileCreatedDate.toString("yyyy-MM-dd.log"));
QFile::rename(logPath, newLogPath);
// QFile::rename(logPath, newLogPath);
QFile::copy(logPath, newLogPath);
QFile::remove(logPath);
logFile = new QFile(logPath);
logOut = (logFile->open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Append)) ? new QTextStream(logFile) : NULL;
logFileCreatedDate = QDate::currentDate();
......
......@@ -230,7 +230,7 @@ int MediaFaceImage::ToFile(const char* pFileName, const void* pData, int nLength
// return pInOutBufferSize; // pOutBuffer由智能指针管理,此处无需手动释放
//}
int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat &image) {
int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat &image,QString &sSn) {
const int BufferSize = 1024 * 1024 * 2; // 缓冲区大小
image.release(); // 释放之前的图像
std::unique_ptr<unsigned char[]> pOutBuffer(new unsigned char[BufferSize]); // 智能指针管理内存
......@@ -238,7 +238,7 @@ int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat
int ret = XSDK_DevSnapSyn(hMedia, nChannel, "", pOutBuffer.get(), &pInOutBufferSize);
if (ret < 0 || pInOutBufferSize <= 0) {
qInfo() << "同步设备端抓图失败";
qInfo() <<sSn <<":同步设备端抓图失败";
return -1;
}
......@@ -249,12 +249,12 @@ int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat
try {
cv::Mat decodedImage = cv::imdecode(buffer, cv::IMREAD_UNCHANGED);
if (decodedImage.empty()) {
qInfo() << "图像解码失败";
qInfo() << sSn<<":图像解码失败";
return -1;
}
image = std::move(decodedImage);
} catch (const cv::Exception& e) {
qInfo() << "图像解码过程中捕获异常:" << e.what();
qInfo() << sSn<<":图像解码过程中捕获异常:" << e.what();
return -1;
}
......
......@@ -18,7 +18,7 @@ public:
static MediaFaceImage* getInstance(); // 单例模式获取实例的静态成员函数
void ParserImageData(const unsigned char* pData, int nDataLen, char* pJpg, int* nJpgLen, char* pJson);
int FaceImageCallBack(XSDK_HANDLE hMedia,int nChannel,cv::Mat &image);
int FaceImageCallBack(XSDK_HANDLE hMedia,int nChannel,cv::Mat &image,QString &sSn);
int CameraImage(XSDK_HANDLE hMedia,int nChannel,std::vector<uchar> &buffer);
int ToFile(const char* pFileName, const void* pData, int nLenght);
......
......@@ -10,7 +10,6 @@ ParkingSpaceInfo::ParkingSpaceInfo(){
}
ParkingSpaceInfo::~ParkingSpaceInfo(){
qInfo() << "ParkingSpaceInfo:关闭";
}
void ParkingSpaceInfo::addQueue(RecognizedInfo &info){
QMutexLocker locker(&queueMutex);
......
......@@ -250,7 +250,6 @@ struct Camera {
struct FaceConfig {
bool isOn;
int faceNumbers;
uint64 faceFrequency;
float confidence;
int faceLen;
quint64 updateAt;
......@@ -281,7 +280,7 @@ struct UniformConfig {
};
struct HumanConfig{
bool isOn;
int humanDetectionLen;
uint64 faceFrequency;
quint64 updateAt;
};
......
......@@ -13,7 +13,8 @@ TEMPLATE = app
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
DEFINES += APP_VERSION=\\\"1.3.2\\\"
DEFINES += QT_MESSAGELOGCONTEXT
DEFINES += QT_NO_DEBUG_OUTPUT
QMAKE_LIBDIR += /usr/local/lib
INCLUDEPATH+=/usr/local/include/opencv4
......@@ -24,6 +25,9 @@ INCLUDEPATH+=/usr/local/include/human
INCLUDEPATH+=/usr/local/include/CImg
INCLUDEPATH+=/usr/local/include/mqtt
# 禁用所有警告
QMAKE_CXXFLAGS += -w
......
......@@ -557,7 +557,8 @@ void MainWindow::startCamera(const QString &httpurl){
reStatus.camera_info_list.push_front(camera_info);
__uint8_t new_algorithm= intToUint8t(devConfig.faceConfig.isOn,devConfig.licensePlateConfig.isOn,devConfig.uniformConfig.isOn,devConfig.humanConfig.isOn);
uint64 face_frequency=devConfig.humanConfig.faceFrequency;
offlineCameraHandle->initFaceFrequency(face_frequency);
offlineCameraHandle->cameraParameterUpdate(devConfig);
offlineCameraHandle->initAlgorithmPermissions(new_algorithm);
if(!offlineCameraHandle->compareLists(device.areas)){
......@@ -724,7 +725,6 @@ void MainWindow::initEncodeToString(QString &enCodeJson) {
}
bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &workSpWMn,QString &sn){
qInfo()<<"iniWorkSpVMn=="<<sn;
QString jsonfile=QCoreApplication::applicationDirPath()+"/camera_config.json";
bool isEqual=true;
......@@ -975,8 +975,6 @@ __uint8_t MainWindow::intToUint8t(bool faceAlgorithm, bool licensePlateAlgorithm
// 车牌识别对应最低位(第0位)
result |= (licensePlateAlgorithm ? 1 : 0);
qInfo()<<"算法结果"<<result;
return result;
}
void MainWindow::initCameras(vides_data::cameraParameters &parameter,vides_data::responseConfig &devConfig,const std::list<vides_data::responseArea>&areas,std::list<vides_data::requestCameraInfo>&camera_info_list){
......@@ -988,9 +986,10 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,vides_data:
CameraHandle * cameraHandle =new CameraHandle(parameter.sDevId,parameter.httpUrl,parameter.sSn,parameter.channel,image_save,heightReference,devConfig);
int sdk_handle=cameraHandle->sdkDevLoginSyn(parameter.sDevId,parameter.nDevPort,parameter.sUserName,parameter.sPassword,3000);
qInfo()<<"句柄为2:"<<sdk_handle;
qDebug() << QString("SN(%1): 句柄为%2").arg(parameter.sSn).arg(sdk_handle);
if(sdk_handle<=0){
qInfo() << "登录失败";
qInfo() << QString("SN(%1): 登录失败").arg(parameter.sSn);
return ;
}
mediaFaceImage->setMap(sdk_handle,cameraHandle);
......@@ -1002,7 +1001,7 @@ void MainWindow::initCameras(vides_data::cameraParameters &parameter,vides_data:
int synTime=devConfig.camera.devSnapSynTimer;
uint64 face_frequency=devConfig.faceConfig.faceFrequency;
uint64 face_frequency=devConfig.humanConfig.faceFrequency;
float carConfidenceMax=devConfig.licensePlateConfig.carConfidenceMax;
float carConfidenceMin=devConfig.licensePlateConfig.carConfidenceMin;
......
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment