Commit d6c2042f by liusq

非ui的盒子代码

parents
# This file is used to ignore files which are generated
# ----------------------------------------------------------------------------
*~
*.autosave
*.a
*.core
*.moc
*.o
*.obj
*.orig
*.rej
*.so
*.so.*
*_pch.h.cpp
*_resource.rc
*.qm
.#*
*.*#
core
!core/
tags
.DS_Store
.directory
*.debug
Makefile*
*.prl
*.app
moc_*.cpp
ui_*.h
qrc_*.cpp
Thumbs.db
*.res
*.rc
/.qmake.cache
/.qmake.stash
# qtcreator generated files
*.pro.user*
# xemacs temporary files
*.flc
# Vim temporary files
.*.swp
# Visual Studio generated files
*.ib_pdb_index
*.idb
*.ilk
*.pdb
*.sln
*.suo
*.vcproj
*vcproj.*.*.user
*.ncb
*.sdf
*.opensdf
*.vcxproj
*vcxproj.*
# MinGW generated files
*.Debug
*.Release
# Python byte code
*.pyc
# Binaries
# --------
*.dll
*.exe
<RCC>
<qresource prefix="/gamera">
<file>gameras.ini</file>
</qresource>
<qresource prefix="/images">
<file>test_data/0.jpg</file>
<file>test_data/1.jpg</file>
<file>test_data/2.jpg</file>
<file>test_data/fake.jpg</file>
<file>test_data/hasface.jpg</file>
<file>test_data/mask.jpg</file>
<file>test_data/noface.jpg</file>
<file>test_data/nomask.jpg</file>
<file>test_data/p1.jpg</file>
<file>test_data/p2.jpg</file>
<file>test_data/p3.jpg</file>
<file>test_data/real.jpg</file>
<file>test_data/crop.png</file>
<file>yifei.jpg</file>
<file>rgb_fake.jpg</file>
<file>mask.png</file>
<file>Kunkun.jpg</file>
<file>kun.jpg</file>
<file>face_sample.png</file>
<file>face_comp.jpeg</file>
<file>cxk.jpg</file>
<file>crop.png</file>
<file>blur.jpg</file>
</qresource>
<qresource prefix="/model_zip">
<file>T1</file>
<file>T1.index</file>
</qresource>
<qresource prefix="/lprv3u_models">
<file>rpv3_mdict_160h.mnn</file>
<file>litemodel_cls_96xh.mnn</file>
<file>b320_backbone_h.mnn</file>
<file>b320_header_h.mnn</file>
<file>b640x_backbone_h.mnn</file>
<file>b640x_head_h.mnn</file>
</qresource>
</RCC>
#include "CameraHandle.h"
#include "TaskRunnable.h"
#include "HumanDetection.h"
#include <QRegularExpression>
// Default constructor: intentionally empty.
// NOTE(review): members such as hDevice/detector/ctx are NOT initialized
// here (only the parameterized constructor sets them) — presumably this
// overload exists only to satisfy container/meta-object requirements;
// confirm no caller uses a default-constructed handle directly.
CameraHandle::CameraHandle(){
}
// Full constructor: wires the record-upload signal/slot, creates the
// human/vehicle detector and the license-plate recognition context, and
// connects the semaphore watchdog timer.
//
// Parameters:
//   url/httpUrl   - device stream URL and backend HTTP endpoint
//   sSn           - camera serial number
//   channel       - camera channel index
//   modelPaths    - directory holding the LPR model files
//   carConfidence - plate-recognition confidence threshold
//   imageSave     - 1 to archive every processed frame to disk
CameraHandle::CameraHandle(QString &url, QString &httpUrl, QString &sSn, int &channel,const QString &modelPaths, float carConfidence,int imageSave)
: hDevice(-1),
url(url),
loginParam(new SXSDKLoginParam()),
sxMediaFaceImageReq(new SXMediaFaceImageReq()),
sSn(sSn),
channel(channel),
httpUrl(httpUrl),
dev_snap_syn_timer(new QTimer()),
release_timer(new QTimer()),
image_save(imageSave),
semaphore(1) {
// Queued so pushRecordToCloud always runs on this object's thread.
connect(this, SIGNAL(afterDownloadFile(int,int,QString)), this, SLOT(pushRecordToCloud(int,int,QString)),Qt::QueuedConnection);
detector = TCV_CreateHumanDetector();
// Detection score threshold (default 0.5).
TCV_HumanDetectorSetScoreThreshold(detector, 0.5f);
HLPR_ContextConfiguration configuration = {0};
// by_mpath must stay alive until HLPR_CreateContext returns: models_path
// points into its buffer.
QByteArray && by_mpath=modelPaths.toUtf8();
char* m_path=by_mpath.data();
configuration.models_path = m_path;
configuration.max_num = 5;
configuration.det_level = DETECT_LEVEL_LOW;
configuration.use_half = false;
configuration.nms_threshold = 0.5f;
configuration.rec_confidence_threshold = carConfidence;
configuration.box_conf_threshold = 0.30f;
configuration.threads = 1;
ctx = HLPR_CreateContext(&configuration);
// Watchdog: frees the snapshot semaphore if processing stalls.
connect(release_timer, &QTimer::timeout, this, &CameraHandle::releaseSemaphore);
}
// Destructor: stops timers, frees SDK objects and per-space bookkeeping,
// then waits for any outstanding snapshot tasks on the global pool.
// NOTE(review): waitForDone() blocks on the *global* thread pool, so
// destroying one handle waits for every queued task — confirm intended.
CameraHandle::~CameraHandle() {
Common & instace= Common::getInstance();
dev_snap_syn_timer->stop();
instace.deleteObj(dev_snap_syn_timer);
instace.deleteObj(release_timer);
instace.deleteObj(loginParam);
instace.deleteObj(sxMediaFaceImageReq);
if(detector!=nullptr){
TCV_ReleaseHumanDetector(detector);
detector=nullptr;
}
// parkMap owns the ParkingSpaceInfo objects (parkingSpaceInfos holds
// the same pointers, so only delete once).
for(auto iter = parkMap.begin(); iter != parkMap.end(); ++iter) {
instace.deleteObj( iter->second);
}
parkMap.clear();
QThreadPool::globalInstance()->waitForDone();
}
// Synchronous SDK login. On success the returned (non-negative) handle is
// cached in hDevice and returned; on failure the negative SDK error code
// is returned and hDevice is left untouched.
// NOTE(review): the strcpy calls assume sDevId/sUserName/sPassword fit the
// fixed-size fields of SXSDKLoginParam — confirm the SDK's buffer sizes
// against the maximum lengths these strings can take.
int CameraHandle::sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout) {
QByteArray byteArray = sDevId.toUtf8();
char* cDevid=byteArray.data();
strcpy(loginParam->sDevId, cDevid);
loginParam->nDevPort=nDevPort;
QByteArray byteName = sUserName.toUtf8();
char* cName=byteName.data();
strcpy(loginParam->sUserName, cName);
if(sPassword.length()>0){
QByteArray bytePassword = sPassword.toUtf8();
strcpy(loginParam->sPassword, bytePassword.constData());
}else{
strcpy(loginParam->sPassword, "");
}
loginParam->nCnnType=EDEV_CNN_TYPE_AUTO;
int loginResult =XSDK_DevLoginSyn(loginParam,nTimeout);
if(loginResult<0){
qInfo() << "登录设备失败";
return loginResult;
}
this->hDevice=loginResult;
return loginResult;
}
// SDK media callback used by the record-download flow.
//
// Dispatches on nDataType:
//  * EXSDK_DATA_FORMATE_FRAME: appends each frame to the download file
//    named in cameraHandle->getCurrentData()["downloadFileName"].
//  * EXSDK_DATA_MEDIA_ON_PLAY_STATE + EState_Media_DataEnd: download is
//    complete — stop the media handle, remux the .h264 file to .mp4 via
//    ffmpeg, and emit afterDownloadFile() so the result is pushed to the
//    cloud.
//
// BUGFIX: the function is declared to return int but previously fell off
// the end without a return statement (undefined behavior). It now returns
// 0 on every path that does not have a more specific result.
int XNetSDK_MediaCallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int nParam2, int nParam3, const char* szString, void* pData, int64 pDataInfo, int nSeq, void* pUserData, void* pMsg){
    CameraHandle* cameraHandle=static_cast<CameraHandle*>(pUserData);
    std::map<QString, QString> &data=cameraHandle->getCurrentData();
    QString dName=data.at("downloadFileName");
    QByteArray bFname =dName.toUtf8();
    const char* cFname=bFname.data();
    int sId= data.at("id").toInt();
    if (EXSDK_DATA_FORMATE_FRAME == nDataType)
    {
        // Frame payload for the in-progress download.
        SXSDK_FRAME_INFO* pFrame = (SXSDK_FRAME_INFO*)pDataInfo;
        MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
        if (pFrame->nType == XSDK_FRAME_TYPE_VIDEO && pFrame->nSubType == XSDK_FRAME_TYPE_VIDEO_I_FRAME)
        {
            // I-frame diagnostics were printed here originally; kept as a hook.
            //printf("[%d]::OnFrame[Len:%d][Type:%d/%d][%04d-%02d-%02d %02d:%02d:%02d-%03d]\r\n", hMedia, pFrame->nLength, pFrame->nType, pFrame->nSubType, pFrame->nYear, pFrame->nMonth, pFrame->nDay, pFrame->nHour, pFrame->nMinute, pFrame->nSecond, (int)(pFrame->nTimeStamp % 1000));
        }
        if (cameraHandle->getMediaHandle() > 0 && cameraHandle->getMediaHandle()== hMedia)
        {
            if (pFrame->nSubType == XSDK_ENCODE_VIDEO_JPEG)
            {
                // JPEG frames: write the bare content.
                mediaFaceImage->AbFile(cFname, pFrame->pContent, pFrame->nFrameLength);
            }
            else
            {
                // Other codecs (H264/H265/g711a/aac): write including the
                // vendor's private header (pHeader/nLength), not just the
                // raw payload (pContent/nFrameLength).
                mediaFaceImage->AbFile(cFname, pFrame->pHeader, pFrame->nLength);
            }
        }
    }
    else if (ESXSDK_MEDIA_START_REAL_PLAY == nDataType
        || ESXSDK_MEDIA_START_FACE_IMAGE == nDataType
        || ESXSDK_MEDIA_START_RECORD_PLAY == nDataType
        || ESXSDK_MEDIA_DOWN_RECORD_FILE == nDataType
        || ESXSDK_MEDIA_DOWN_IMAGES_FILE == nDataType
        || ESXSDK_MEDIA_START_TALK == nDataType
        )
    {
        // Start-of-operation notifications: nDataLen carries the result
        // code; nothing is currently done with it.
        int& nResult = nDataLen;
        (void)nResult;
    }
    else if (EXCMD_MONITOR_DATA == nDataType
        || EXCMD_DOWNLOAD_DATA == nDataType
        || EXCMD_PLAY_DATA == nDataType
        || EXCMD_TALK_CU_PU_DATA == nDataType
        || EXCMD_TALK_PU_CU_DATA == nDataType
        )
    {
        if (EXCMD_DOWNLOAD_DATA == nDataType)
        {
            qDebug()<<"EXCMD_DOWNLOAD_DATA"<<nDataType;
            // Legacy raw-data write path, retained commented-out for reference:
            /*if (g_hRecordDownload > 0 && g_hRecordDownload == hMedia)
            {
                std::string::size_type pos = g_test.sDownloadFileName.rfind('.');
                std::string strSuffix = g_test.sDownloadFileName.substr(pos + 1, pos + 2);
                if (STRCMP(strSuffix.c_str(), "jpg") == 0)
                {
                    const char* pWriteData = (const char*)pData;
                    if(stInit == 0)
                    {
                        pWriteData += 16;
                        nDataLen -= 16;
                        ++stInit;
                    }
                    XFILE::ABFile(g_test.sDownloadFileName.c_str(), pWriteData, nDataLen);
                }
                else
                {
                    XFILE::ABFile(g_test.sDownloadFileName.c_str(), pData, nDataLen);
                }
            }*/
        }
    }
    else if (EXSDK_DATA_MEDIA_ON_PLAY_STATE == nDataType)
    {
        // nDataLen carries the new play state.
        if (nDataLen == EState_Media_DataEnd)
        {
            if (cameraHandle->getMediaHandle() > 0)
            {
                XSDK_MediaStop(cameraHandle->getMediaHandle());
                cameraHandle->setMediaHandle(0);
                // Remux the finished .h264 download into an .mp4 container.
                QString mp4FileName =dName;
                mp4FileName.replace(QRegularExpression("\\.[^\\.]*$"), ".mp4");
                data["downloadFileName"]=mp4FileName;
                QProcess ffmpegProcess;
                QStringList arguments;
                arguments << "-i" << dName <<"-c:v" << "copy" << mp4FileName;
                ffmpegProcess.start("ffmpeg", arguments);
                // Wait (up to 20 s) for ffmpeg to finish.
                if (ffmpegProcess.waitForFinished(20000)) {
                    //QFile::remove(dName);
                    qDebug() << "ffmpeg process finished successfully.";
                } else {
                    qDebug() << "Error: ffmpeg process did not finish.";
                }
                ffmpegProcess.close();
                // Build the OSS URL for the uploaded recording and notify.
                QFileInfo fileInfo(mp4FileName);
                QString fileName = fileInfo.fileName();
                QString ossUrl=data.at("ossUrl");
                ossUrl.append("/").append(fileName);
                std::map<int, vides_data::responseRecognitionData>datas =cameraHandle->getVideoCurrentData();
                QString recognitionType;
                if(datas.count(sId)>0){
                    recognitionType=datas.at(sId).recognitionType;
                }
                emit cameraHandle->afterDownloadFile(sId,recognitionType.toInt(),ossUrl);
            }
        }
    }
    return 0; // was missing: falling off a non-void function is UB
}
/// Thin wrapper: enable/disable SDK alarm listening for `hDevice`.
/// Returns the SDK call's result code unchanged.
int CameraHandle::sdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener) {
    const int sdkResult = XSDK_DevSetAlarmListener(hDevice, bListener);
    return sdkResult;
}
int CameraHandle::getChannel(){
return channel;
}
int CameraHandle::getHdevice() {
return hDevice;
}
void CameraHandle::getCurrentFrame(std::vector<uchar> &buffer){
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
mediaFaceImage->CameraImage(this->hDevice,this->channel,buffer);
}
// Arm dev_snap_syn_timer so a snapshot-sync task is dispatched for
// `hDevice` every `syn_timer` milliseconds.
// NOTE(review): each call adds another timeout connection to the same
// timer, so calling this twice would fire sdkRealTimeDevSnapSyn multiple
// times per tick — confirm it is invoked only once per handle.
void CameraHandle::initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer) {
connect(dev_snap_syn_timer, &QTimer::timeout, this, [this,hDevice]() {
this->sdkRealTimeDevSnapSyn(hDevice);
},Qt::QueuedConnection);
dev_snap_syn_timer->start(syn_timer);
}
// Queue an asynchronous snapshot-sync job on the global thread pool.
// NOTE(review): the global pool's max thread count is re-set to 8 on
// every call, affecting all users of the global pool — confirm intended.
void CameraHandle::sdkRealTimeDevSnapSyn(int hDevice) {
QThreadPool* threadPool = QThreadPool::globalInstance();
threadPool->setMaxThreadCount(8);
auto taskSyn = std::bind(&CameraHandle::sdkDevSnapSyn, this, hDevice, this->channel);
// TaskRunnable takes ownership and runs taskSyn on a pool thread.
auto taskRunnable = new TaskRunnable(taskSyn, hDevice, this->channel, RunFunction::SdkDevSnapSyn);
threadPool->start(taskRunnable);
}
/// Camera serial number.
QString CameraHandle::getSSn(){
    return this->sSn;
}
int CameraHandle::getMediaHandle(){
return mediaHandle;
}
/// Record the active media handle (0 clears it).
void CameraHandle::setMediaHandle(int mediaHandle){
    this->mediaHandle = mediaHandle;
}
/// Thread-safe setter for the last observed face/person count.
void CameraHandle::setCurrentFace(int currentFace){
    std::lock_guard<std::mutex> lock(faceMutex);
    this->currentFace = currentFace;
}
std::map<QString, QString>&CameraHandle::getCurrentData(){
return currentData;
}
std::map<int, vides_data::responseRecognitionData>&CameraHandle::getVideoCurrentData(){
return videoCurrentData;
}
// Start an asynchronous record download for the [startTimer, endTime]
// window; frames arrive via XNetSDK_MediaCallBack, which appends them to
// currentData["downloadFileName"] and remuxes to .mp4 at end-of-data.
void CameraHandle::sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
QString startTimer, QString endTime){
// Abort any previous download before starting a new one.
if(mediaHandle>0){
XSDK_MediaStop(mediaHandle);
this->mediaHandle=0;
}
currentData["id"]=QString::number(id);
Common & instace= Common::getInstance();
QString videoPath=instace.getVideoOut();
QDateTime now = QDateTime::currentDateTime();
QString szTime = now.toString("yyyy_MM_dd_hh_mm_ss");
// Target file: <videoOut><sn>_<timestamp>.h264
QString newSn=sSn;
newSn.append("_");
newSn.append(szTime);
//downloadFileName=QString("%1/%2.h264").arg(videoPath, szTime);
currentData["downloadFileName"] =QString("%1%2.h264").arg(videoPath, newSn);
SXMediaRecordReq param = { 0 };
// cStart/cEnd stay valid for the XSDK call: bStart/bEnd outlive it.
QByteArray bStart =startTimer.toUtf8();
const char* cStart=bStart.data();
QByteArray bEnd=endTime.toUtf8();
const char* cEnd=bEnd.data();
strcpy(param.sBeginTime, cStart); // requested start time (required)
strcpy(param.sEndTime, cEnd); // requested end time (required)
param.nChannel = channel; // channel number (required)
param.nStreamType = 0; // stream type (required)
param.nRequestType = EXSDK_DATA_FORMATE_FRAME; // frame-format output (optional; see nRequestType docs)
//param.result = sdkInitCallback; // result callback (required)
SMsgReceiver sms(nullptr,XNetSDK_MediaCallBack,this);
param.result=sms;
qDebug() << "XSDK_MediaRecordDownload hDevice:"<<this->hDevice;
this->mediaHandle = XSDK_MediaRecordDownload(this->hDevice, &param, 0, 4000);
if ( this->mediaHandle < 0)
{
qInfo() << "XSDK_MediaRecordDownload Failed:"<< this->mediaHandle ;
return ;
}
}
bool CameraHandle::acquireAndReleaseWithTimeout(bool flag) {
if (!semaphore.tryAcquire()) {
qInfo() << (flag ? "callbackFunction:正在执行线程 " : "sdkDevSnapSyn:正在执行线程");
return true;
}
QMetaObject::invokeMethod(release_timer, "start",
Qt::QueuedConnection,
Q_ARG(int, timeoutMs));
return false;
}
// Watchdog slot: stop the timer (if still pending) and free the
// snapshot semaphore so the next acquisition can proceed.
void CameraHandle::releaseSemaphore() {
    const bool watchdogPending = release_timer->isActive();
    if (watchdogPending) {
        QMetaObject::invokeMethod(release_timer, "stop", Qt::QueuedConnection);
    }
    semaphore.release();
}
// Device alarm callback: parse and log the alarm JSON, then pull one
// frame from the device and run it through updateImage().
// Returns 0 on success, -1 when the pipeline is busy or no frame could
// be read.
// BUGFIX: the function is declared int but previously fell off the end
// without a return statement (undefined behavior); it now returns 0.
int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
    QByteArray && byJson = szString.toLocal8Bit();
    const char * cJson= byJson.data();
    XSDK_CFG::AlarmInfo alarmInfo;
    if (0 == alarmInfo.Parse(cJson))
    {
        const char* buf = alarmInfo.Event.ToString();
        qInfo() << "buf:"<<buf;
        qInfo() << "OnDevAlarmCallback[Dev:" << hObject << "]"
                << "\r\nEvent:" << alarmInfo.Event.Value()
                << "\r\nChannel:" << alarmInfo.Channel.Value()
                << "\r\nStartTime:" << alarmInfo.StartTime.Value()
                << "\r\nStatus:" << alarmInfo.Status.Value();
    }
    else
    {
        // JSON did not parse; log it raw.
        qDebug() << "OnDevAlarmCallback[Dev:" << hObject << "][Event:" << szString << "]";
    }
    // Only one snapshot pipeline may run at a time.
    if(acquireAndReleaseWithTimeout(false)){
        return -1;
    }
    cv::Mat image;
    MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
    qint64 currentTime= QDateTime::currentSecsSinceEpoch();
    mediaFaceImage->FaceImageCallBack(hObject,sxMediaFaceImageReq->nChannel,image);
    if (image.empty())
    {
        qInfo() << "Failed to read the image";
        // Semaphore is reclaimed later by release_timer's timeout.
        return -1;
    }
    this->updateImage(image,currentTime);
    QMetaObject::invokeMethod(release_timer, "stop", Qt::QueuedConnection);
    semaphore.release();
    return 0;
}
// Periodic snapshot-sync worker (runs on a pool thread): grabs one frame,
// tracks consecutive failures to detect an offline device, and feeds
// successful frames into updateImage().
void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
if(hDevice<=0){
qInfo() << "相机断线";
return;
}
// Skip this tick if a previous snapshot is still being processed.
if(acquireAndReleaseWithTimeout(true)){
return ;
}
cv::Mat image;
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
qint64 currentTime= QDateTime::currentSecsSinceEpoch();
int ret=mediaFaceImage->FaceImageCallBack(hDevice,nChannel, image);
qDebug() << "SdkDevSnapSyn HTTP POST request to: " << sSn;
if (ret < 0) {
offlineCount++; // bump the consecutive-failure counter
qDebug() << "offlineCount: " << loginParam->sDevId<<offlineCount;
if (offlineCount >= 3) { // three consecutive failures => offline
qInfo() << "设备离线";
QString ip=QString::fromUtf8(loginParam->sDevId);
MainWindow::sp_this->clearOfflineCameraHandle(ip,loginParam->nDevPort);
// Offline handling hook:
// TODO: update device state, send alert notifications, etc.
// Reset so the next run of failures is counted afresh.
offlineCount = 0;
}
} else {
// A success breaks the streak.
offlineCount = 0;
}
if (image.empty())
{
// Semaphore is reclaimed later by release_timer's timeout.
qInfo() << "Failed to read the image";
return ;
}
this->updateImage(image,currentTime);
QMetaObject::invokeMethod(release_timer, "stop", Qt::QueuedConnection);
semaphore.release();
}
/// Configure the watchdog timeout (milliseconds) used by
/// acquireAndReleaseWithTimeout().
void CameraHandle::setTimeoutMs(int timeoutMs){
    this->timeoutMs = timeoutMs;
}
/// JPEG-encode `image` (quality 90) and store the Base64 bytes in
/// `base64Data`.
void CameraHandle::matToBase64(const cv::Mat &image, QByteArray &base64Data) {
    std::vector<int> encodeParams;
    encodeParams.push_back(cv::IMWRITE_JPEG_QUALITY);
    encodeParams.push_back(90);
    std::vector<unsigned char> jpegBytes;
    cv::imencode(".jpg", image, jpegBytes, encodeParams);
    const QByteArray raw(reinterpret_cast<const char*>(jpegBytes.data()),
                         jpegBytes.size());
    base64Data = raw.toBase64();
}
// Per-space entry/exit state machine. A new observation differing from the
// space's current plate is only acted on once it has been seen >= 3 times
// in the space's recent queue. Outcomes (written to `result`):
//   Mobilization          - empty space, new plate => entry
//   Exit                  - occupied space, empty observation and no car
//                           visible => exit
//   ExitAndMobilization   - occupied space, different plate => old car
//                           out + new car in (both recorded in exitAndMoMap)
// `result` is left untouched when no transition is confirmed — callers
// must pre-initialize it.
void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat &frame, RecognizedInfo& newInfo,
int &result,std::map<int,RecognizedInfo>&exitAndMoMap){
if (newInfo.getLicensePlate() != park->getCurrentPlate().getLicensePlate()) {
// Count how often this exact plate appears in the recent queue.
int count = 0;
for (auto& info : park->getQueue()) {
if (info.getLicensePlate() == newInfo.getLicensePlate()) {
count++;
}
}
qDebug() << "不同的区域:" << park->getSpaceIndex() << ",数量:" << count;
if (count >= 3) {
// First arrival: current plate empty, new plate present => entry.
if(park->getCurrentPlate().getLicensePlate().length()<=0){
park->setCurrentPlate(newInfo);
result=CAR_INFORMATION::Mobilization;
}else {
// New observation empty => candidate exit; double-check with the
// vehicle detector before declaring the space free.
if(newInfo.getLicensePlate().length()<=0){
HumanDetection &humanDetection=HumanDetection::getInstance();
int car_size = humanDetection.findHuManCar(frame,1,detector);
if(car_size<=0){
// No car visible: confirmed exit.
park->setCurrentPlate(newInfo);
result=CAR_INFORMATION::Exit;
}else{
// Car still visible: discard the empty observations.
park-> removeNoQueue();
qDebug()<<sSn<<":"<<"no出场:"<<car_size;
}
}else{
// Different non-empty plate: old car leaves, new car enters.
exitAndMoMap[CAR_INFORMATION::Exit]=park->getCurrentPlate();
exitAndMoMap[CAR_INFORMATION::Mobilization]=newInfo;
park->setCurrentPlate(newInfo);
result=CAR_INFORMATION::ExitAndMobilization;
}
}
}
}
}
// Core per-frame pipeline: people counting + push, employee face
// recognition + push, optional frame archiving, license-plate recognition,
// and per-parking-space entry/exit bookkeeping.
//
// BUGFIX: the transition result `res` was a single variable shared across
// all parking-space loop iterations, so a stale Exit/Mobilization from one
// space leaked into the next; one branch even declared a shadowing
// *uninitialized* `int res;` that was then read. `res` is now declared
// fresh (initialized to -1) for every space.
void CameraHandle::updateImage(const cv::Mat & frame,qint64 currentTime){
    Common & instace= Common::getInstance();
    qDebug()<<"=============================>";
    FaceReconition &faceRecognition = FaceReconition::getInstance();
    HumanDetection &humanDetection=HumanDetection::getInstance();
    LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance();
    static int i=0;
    printf("updateImage retryCount: %d \n", ++i);
    //faceRecognition.search(frame,imageHandleList,names);
    QByteArray imgs;
    // Detector pass 0: count people in the frame.
    int faSize=humanDetection.findHuManCar(frame,0,detector);
    this->matToBase64(frame, imgs);
    HttpService httpService(httpUrl);
    // Only push a population update when the count changed.
    if(currentFace!=faSize){
        vides_data::response* resp=httpService.httpPostFacePopulation(imgs,faSize,sSn,currentTime);
        if (resp->code!= 0) {
            qInfo()<<"人数变化推送信息推送失败";
        }
        instace.deleteObj(resp);
        currentFace=faSize;
    }
    if(faSize>0){
        // Match detected faces against the employee gallery; push each hit.
        std::list<vides_data::faceRecognitionResult>faces;
        faceRecognition.doesItExistEmployee(frame,faces);
        if (!faces.empty()) {
            for (const auto& face : faces) {
                vides_data::requestFaceReconition faceReconition;
                faceReconition.id = face.id;
                faceReconition.img = imgs;
                faceReconition.sn = sSn;
                faceReconition.time = currentTime;
                // Four box corners derived from (x, y, width, height).
                faceReconition.area.top_left_corner_x=face.x;
                faceReconition.area.top_left_corner_y=face.y;
                faceReconition.area.bottom_right_corner_x= face.x + face.width;
                faceReconition.area.bottom_right_corner_y= face.y + face.height;
                faceReconition.area.bottom_left_corner_x = face.x;
                faceReconition.area.bottom_left_corner_y = face.y + face.height;
                faceReconition.area.top_right_corner_x = face.x + face.width;
                faceReconition.area.top_right_corner_y = face.y;
                httpService.setHttpUrl(httpUrl);
                vides_data::response* resp = httpService.httpPostFaceReconition(faceReconition);
                if (resp->code!= 0) {
                    qInfo() << "识别人脸信息推送失败:" << face.id;
                }
                instace.deleteObj(resp);
            }
        }
    }
    QString lpNumber;
    vides_data::requestLicensePlate plate;
    plate.sn=sSn;
    // Optionally archive the raw frame to disk.
    if(image_save==1){
        QString fileName= instace.getVideoOut().append(instace.getTimeString()+".jpg");
        bool success = cv::imwrite(fileName.toStdString(), frame);
        if (success) {
            qDebug() << "图片已成功保存至:" << fileName;
        } else {
            qDebug() << "图片保存失败!";
        }
    }
    licensePlateRecogn.licensePlateNumber(frame, lpNumber,plate,currentTime,ctx);
    std::map<int,RecognizedInfo>exitMoMap;
    vides_data::requestLicensePlate newPlate;
    newPlate.sn=sSn;
    qDebug()<<QString("识别的车牌号是:%1").arg(lpNumber);
    std::list<vides_data::LicensePlate>ps=plate.plates;
    this->matToBase64(frame, imgs);
    if(ps.size()==0){
        // No plate anywhere in the frame: feed an empty observation to
        // every space so exits can still be detected.
        for (auto it = parkMap.begin(); it != parkMap.end(); ++it) {
            ParkingSpaceInfo* value = it->second;
            if(value->getQueue().size()>=10) {
                value->removeQueue();
            }
            RecognizedInfo recognizedInfo(lpNumber,QDateTime::currentSecsSinceEpoch(),"未知");
            value->addQueue(recognizedInfo);
            int res = -1; // fresh per space (was shared across iterations)
            this->checkAndUpdateCurrentPlate(value,frame,recognizedInfo,res,exitMoMap);
            if (res == Exit || res == Mobilization) {
                vides_data::LicensePlate current;
                current.areaLocation=value->getArea();
                current.img=imgs;
                current.new_color=recognizedInfo.getColor();
                current.new_plate=recognizedInfo.getLicensePlate();
                current.time=recognizedInfo.getRecognizeTime();
                newPlate.plates.push_back(std::move(current));
            }
        }
    }else{
        // Assign each recognized plate to the parking space it overlaps most.
        std::unordered_map<int, vides_data::LicensePlate> indexToLicensePlate;
        for (auto it_ps = ps.begin(); it_ps != ps.end(); ++it_ps) {
            vides_data::LicensePlate& currentPlate = *it_ps;
            ParkingSpaceInfo newcurrentPlate;
            newcurrentPlate.setArea(currentPlate.recognition);
            int index = this->findPointRegion(newcurrentPlate);
            qDebug()<<"识别的区域:"<<index;
            indexToLicensePlate[index] = currentPlate;
        }
        for (auto it = parkMap.begin(); it != parkMap.end(); ++it) {
            int key = it->first;
            ParkingSpaceInfo* value = it->second;
            int res = -1; // fresh per space (was stale / read uninitialized)
            if (indexToLicensePlate.count(key) > 0) {
                // A plate was seen in this space.
                if(value->getQueue().size()>=10) {
                    value->removeQueue();
                }
                vides_data::LicensePlate recognition= indexToLicensePlate.at(key);
                RecognizedInfo recognizedInfo(recognition.new_plate,recognition.time,recognition.new_color);
                value->addQueue(recognizedInfo);
                this->checkAndUpdateCurrentPlate(value,frame,recognizedInfo,res,exitMoMap);
                if (res == Exit || res == Mobilization) {
                    recognition.areaLocation=value->getArea();
                    recognition.img=imgs;
                    recognition.new_color=recognizedInfo.getColor();
                    newPlate.plates.push_back(std::move(recognition));
                }
                if(res==ExitAndMobilization){
                    if(exitMoMap.size()>0){
                        // Report both the entering and the exiting vehicle.
                        recognition.areaLocation=value->getArea();
                        recognition.img=imgs;
                        recognition.new_color=recognizedInfo.getColor();
                        newPlate.plates.push_back(std::move(recognition));
                        RecognizedInfo exitInfo=exitMoMap[Exit];
                        vides_data::LicensePlate oldInfo;
                        oldInfo.areaLocation=value->getArea();
                        oldInfo.img=imgs;
                        oldInfo.new_color=exitInfo.getColor();
                        oldInfo.new_plate=exitInfo.getLicensePlate();
                        oldInfo.time=exitInfo.getRecognizeTime();
                        newPlate.plates.push_back(std::move(oldInfo));
                    }
                }
            }else{
                // No plate in this space this frame: empty observation.
                if(value->getQueue().size()>=10) {
                    value->removeQueue();
                }
                RecognizedInfo recognizedInfo("", QDateTime::currentSecsSinceEpoch(), "未知");
                value->addQueue(recognizedInfo);
                this->checkAndUpdateCurrentPlate(value, frame, recognizedInfo, res,exitMoMap);
                if (res == Exit || res == Mobilization) {
                    vides_data::LicensePlate current;
                    current.areaLocation = value->getArea();
                    current.img = imgs;
                    current.new_color = recognizedInfo.getColor();
                    current.new_plate = recognizedInfo.getLicensePlate();
                    current.time = recognizedInfo.getRecognizeTime();
                    current.recognition=value->getArea();
                    newPlate.plates.push_back(std::move(current));
                }
                if(res==ExitAndMobilization){
                    vides_data::LicensePlate current;
                    current.areaLocation = value->getArea();
                    current.img = imgs;
                    current.new_color = recognizedInfo.getColor();
                    current.new_plate = recognizedInfo.getLicensePlate();
                    current.time = recognizedInfo.getRecognizeTime();
                    newPlate.plates.push_back(std::move(current));
                    RecognizedInfo exitInfo=exitMoMap[Exit];
                    vides_data::LicensePlate oldInfo;
                    oldInfo.areaLocation=value->getArea();
                    oldInfo.img=imgs;
                    oldInfo.new_color=exitInfo.getColor();
                    oldInfo.new_plate=exitInfo.getLicensePlate();
                    oldInfo.time=exitInfo.getRecognizeTime();
                    newPlate.plates.push_back(std::move(oldInfo));
                }
            }
        }
    }
    qDebug()<<QString("%1==>当前车牌数量:%2").arg(sSn).arg(ps.size());
    if(newPlate.plates.size()>0){
        // Push all confirmed transitions to the backend.
        licensePlateRecognitionResults(newPlate);
        foreach (auto var, plate.plates) {
            qDebug()<<QString("sn:%1 =>识别的车牌号是:%2").arg(sSn).arg(var.new_plate);
        }
    }
}
// Slot fired after a record download finished: upload the .mp4 to OSS
// using the credentials cached in currentData, then register the OSS URL
// with the backend.
// BUGFIX: `auto map = getCurrentData();` deep-copied the whole map on
// every call even though getCurrentData() returns a reference — now bound
// by reference.
void CameraHandle::pushRecordToCloud(int id, int recognitionType, QString ossUrl){
    HttpService httpService(ossUrl);
    Common & instace= Common::getInstance();
    auto &map = getCurrentData();
    QString videoPath= map.at("downloadFileName");
    QString access_key_id= map.at("access_key_id");
    QString access_key_secret= map.at("access_key_secret");
    QString bucket= map.at("bucket");
    QString securityToken=map.at("security_token");
    vides_data::response*res=httpService.httpUploadFile(videoPath,access_key_id,access_key_secret,bucket,securityToken);
    if(res->code==0){
        // Upload succeeded: tell the backend where the recording lives.
        httpService.setHttpUrl(httpUrl);
        vides_data::response* reco=httpService.httpPostRecord(id,recognitionType,sSn,
                                                              ossUrl);
        if(reco->code!=0){
            qInfo()<<"识别录像地址上传失败";
        }
        instace.deleteObj(reco);
    }
    instace.deleteObj(res);
}
// Push plate-transition results to the backend, retrying up to twice on
// failure. On success, caches the returned STS/OSS credentials in
// currentData for the subsequent record upload.
// BUGFIXES: the early return on an empty `result` list leaked `resp`
// (it was never passed to deleteObj); fixed. Also removed the unused
// local `imgs`.
void CameraHandle::licensePlateRecognitionResults(vides_data::requestLicensePlate &location){
    Common & instace= Common::getInstance();
    int maxRetryCount = 2; // maximum number of attempts
    int retryCount = 0; // attempts made so far
    bool requestSuccess = false; // whether any attempt succeeded
    while (retryCount < maxRetryCount && !requestSuccess) {
        HttpService httpService(httpUrl);
        std::list<vides_data::responseRecognitionData> result;
        vides_data::response*resp= httpService.httpLicensePlateRecognition(location,result);
        if (resp->code == 0) {
            if(result.size()==0){
                instace.deleteObj(resp); // was leaked on this path
                return ;
            }
            // Cache the STS credentials for the upcoming OSS upload.
            vides_data::responseStsCredentials sts_credentials=HttpService::stsCredentials;
            QString oUrl = "http://" + sts_credentials.bucket + "." + sts_credentials.endpoint;
            currentData["ossUrl"] =oUrl;
            currentData["bucket"] = sts_credentials.bucket;
            currentData["access_key_id"] =sts_credentials.access_key_id;
            currentData["access_key_secret"] =sts_credentials.access_key_secret;
            currentData["security_token"]=sts_credentials.security_token;
            // Download-per-result flow, retained commented-out for reference:
            // foreach (auto var, result) {
            //     vides_data::responseRecognitionData data;
            //     data.id=var.id;
            //     data.inTime=var.inTime;
            //     data.outTime=var.outTime;
            //     data.recognitionType=var.recognitionType;
            //     data.sn=var.sn;
            //     videoCurrentData[var.id]=data;
            //     sdkDownloadFileByTime(this->hDevice,var.id,
            //         instace.timestampToDateString(var.inTime),instace.timestampToDateString(var.outTime));
            // }
            requestSuccess = true;
        } else {
            ++retryCount;
        }
        instace.deleteObj(resp);
    }
    if (!requestSuccess) {
        qInfo()<<"licensePlateRecognitionResults:车牌识别结果失败";
        // All retries exhausted; add failure handling here if needed.
    }
}
// Push the timezone configuration to the device, then synchronize its
// clock to `time`.
// NOTE(review): the config call passes EXCMD_CONFIG_GET although zoneCfg
// is being *written* — confirm against the SDK whether this should be
// EXCMD_CONFIG_SET (other setters in this file use SET).
void CameraHandle::sdkDevSystemTimeZoneSyn(QString &time){
QByteArray bTime =time.toUtf8();
const char* cTime=bTime.data();
char outBuffer[512] = { 0 };
int nInOutBufSize = sizeof(outBuffer);
const char* zoneCfg ="{ \"FirstUserTimeZone\" : \"true\", \"OPTimeSetting\" : \"800\" }";
int res = XSDK_DevSetSysConfigSyn(hDevice, JK_System_TimeZone, zoneCfg, strlen(zoneCfg), outBuffer, &nInOutBufSize, 5000, EXCMD_CONFIG_GET);
if(res<0){
qInfo() << "FirstUserTimeZone:修改失败";
}
res=XSDK_DevSynTime(hDevice,cTime,0);
if(res<0){
qInfo() << "sdkDevSystemTimeZoneSyn:修改失败";
}
}
// Push a recording configuration (JSON) to the device.
void CameraHandle::sdkRecordCfg(const char * recordJson){
    qDebug() << recordJson;
    char reply[512] = { 0 };
    int replyLen = sizeof(reply);
    const int rc = XSDK_DevSetSysConfigSyn(hDevice, JK_Record, recordJson,
                                           strlen(recordJson), reply, &replyLen,
                                           5000, EXCMD_CONFIG_SET);
    if (rc < 0) {
        qInfo() << "sdkRecordCfg 录像设置->修改失败"<<rc;
    }
}
// Push a (simplified) encoder configuration to the device.
void CameraHandle::sdkEncodeCfg(const char* pCfg){
    char reply[512] = { 0 };
    int replyLen = sizeof(reply);
    const int rc = XSDK_DevSetSysConfigSyn(hDevice, JK_Simplify_Encode, pCfg,
                                           strlen(pCfg), reply, &replyLen,
                                           5000, EXCMD_CONFIG_SET);
    if (rc < 0) {
        qInfo() << "sdkEncodeCfg 配置编码设置->修改失败"<<rc;
    }
}
// Push the GB28181 (SPVMN) network configuration to the device.
void CameraHandle::sdkDevSpvMn(const char *spvMn){
    char reply[512] = { 0 };
    int replyLen = sizeof(reply);
    qDebug() << spvMn;
    const int rc = XSDK_DevSetSysConfigSyn(hDevice, JK_NetWork_SPVMN, spvMn,
                                           strlen(spvMn), reply, &replyLen,
                                           5000, EXCMD_CONFIG_SET);
    if (rc < 0) {
        qInfo() << "sdkDevSpvMn 28181->修改失败"<<rc;
    }
}
// True when the two parking-area quadrilaterals overlap (their painter
// paths have a non-empty intersection).
bool CameraHandle::polygonsOverlap( ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2) {
    // Convert an area's four corners (TL, BL, BR, TR) into a painter path.
    auto areaToPath = [](ParkingSpaceInfo &space) {
        QPolygonF corners;
        corners << QPointF(space.getArea().topLeftCornerX, space.getArea().topLeftCornerY)
                << QPointF(space.getArea().bottomLeftCornerX, space.getArea().bottomLeftCornerY)
                << QPointF(space.getArea().bottomRightCornerX, space.getArea().bottomRightCornerY)
                << QPointF(space.getArea().topRightCornerX, space.getArea().topRightCornerY);
        QPainterPath path;
        path.addPolygon(corners);
        return path;
    };
    const QPainterPath overlap = areaToPath(poly1).intersected(areaToPath(poly2));
    return !overlap.isEmpty();
}
double CameraHandle::calculateIntersectionArea(const QPolygonF &polygon1, const QPolygonF &polygon2) {
QPolygonF intersection = polygon1.intersected(polygon2);
int n = intersection.count();
if (n < 3) return 0.0;
// 构建增量式凸包
std::vector<QPointF> convexHullPoints;
for (const QPointF& point : intersection) {
while (convexHullPoints.size() >= 2 && ccw(convexHullPoints[convexHullPoints.size() - 2], convexHullPoints.back(), point) <= 0) {
convexHullPoints.pop_back();
}
convexHullPoints.push_back(point);
}
double area = 0.0;
for (size_t i = 0; i < convexHullPoints.size(); ++i) {
size_t j = (i + 1) % convexHullPoints.size();
area += (convexHullPoints[i].x() * convexHullPoints[j].y() - convexHullPoints[j].x() * convexHullPoints[i].y());
}
return qAbs(area) / 2.0;
}
// Cross product of (b - a) and (c - a): positive when the turn a->b->c is
// counter-clockwise, negative when clockwise, zero when collinear.
double CameraHandle::ccw(const QPointF& a, const QPointF& b, const QPointF& c) {
    const double abX = b.x() - a.x();
    const double abY = b.y() - a.y();
    const double acX = c.x() - a.x();
    const double acY = c.y() - a.y();
    return abX * acY - acX * abY;
}
// Find the parking space whose quad has the largest convex overlap with
// `prakArea`'s quad. Returns that space's index, or -1 when nothing
// overlaps.
int CameraHandle::findPointRegion(ParkingSpaceInfo &prakArea){
    // Build an area's quad in TL, TR, BR, BL order.
    auto quadOf = [](ParkingSpaceInfo &space) {
        std::vector<cv::Point2f> quad = {
            cv::Point2f(space.getArea().topLeftCornerX, space.getArea().topLeftCornerY),
            cv::Point2f(space.getArea().topRightCornerX, space.getArea().topRightCornerY),
            cv::Point2f(space.getArea().bottomRightCornerX, space.getArea().bottomRightCornerY),
            cv::Point2f(space.getArea().bottomLeftCornerX, space.getArea().bottomLeftCornerY)
        };
        return quad;
    };
    std::vector<cv::Point2f> targetQuad = quadOf(prakArea);
    qDebug() << "Current Polygon Points:";
    for (const auto &pt : targetQuad) {
        qDebug() << "(" << pt.x << ", " << pt.y << ")";
    }
    double bestArea = 0.0;
    int bestIndex = -1;
    for (ParkingSpaceInfo *candidate : parkingSpaceInfos) {
        std::vector<cv::Point2f> candidateQuad = quadOf(*candidate);
        qDebug() << "Polygon Info Points for Space " << candidate->getSpaceIndex() << ":";
        for (const auto &pt : candidateQuad) {
            qDebug() << "(" << pt.x << ", " << pt.y << ")";
        }
        std::vector<cv::Point2f> overlapShape;
        const double overlapArea =
            cv::intersectConvexConvex(candidateQuad, targetQuad, overlapShape, true);
        // Track the space with the largest positive overlap.
        if (overlapArea > 0.0 && overlapArea > bestArea) {
            bestArea = overlapArea;
            bestIndex = candidate->getSpaceIndex();
        }
    }
    return bestIndex;
}
// Qt-polygon variant of findPointRegion: returns the index of the
// configured space whose polygon overlaps `prakArea` with the largest
// intersection area, or -1 when none overlaps.
int CameraHandle::determineArea(ParkingSpaceInfo &prakArea){
    // Corner order: TL, BL, BR, TR (matches polygonsOverlap()).
    auto polygonOf = [](ParkingSpaceInfo &space) {
        QPolygon poly;
        poly << QPoint(space.getArea().topLeftCornerX, space.getArea().topLeftCornerY)
             << QPoint(space.getArea().bottomLeftCornerX, space.getArea().bottomLeftCornerY)
             << QPoint(space.getArea().bottomRightCornerX, space.getArea().bottomRightCornerY)
             << QPoint(space.getArea().topRightCornerX, space.getArea().topRightCornerY);
        return poly;
    };
    const QPolygon targetPolygon = polygonOf(prakArea);
    double bestArea = 0.0;
    int bestIndex = -1;
    for (ParkingSpaceInfo *candidate : parkingSpaceInfos) {
        if (!polygonsOverlap(prakArea, *candidate)) {
            continue; // cheap rejection before computing the exact area
        }
        const double overlap = calculateIntersectionArea(polygonOf(*candidate), targetPolygon);
        if (overlap > bestArea) {
            bestArea = overlap;
            bestIndex = candidate->getSpaceIndex();
        }
    }
    return bestIndex;
}
// Build ParkingSpaceInfo objects from the configured areas, assigning
// consecutive 1-based indices, and register each in both parkMap and
// parkingSpaceInfos (both hold the same owned pointers).
void CameraHandle::initParkingSpaceInfo(const std::list<vides_data::responseArea> &areas){
    int spaceIndex = 1;
    for (const auto &area : areas) {
        ParkingSpaceInfo *info = new ParkingSpaceInfo();
        vides_data::ParkingArea corners;
        corners.topLeftCornerX = area.top_left_corner_x;
        corners.topLeftCornerY = area.top_left_corner_y;
        corners.topRightCornerX = area.top_right_corner_x;
        corners.topRightCornerY = area.top_right_corner_y;
        corners.bottomRightCornerX = area.bottom_right_corner_x;
        corners.bottomRightCornerY = area.bottom_right_corner_y;
        corners.bottomLeftCornerX = area.bottom_left_corner_x;
        corners.bottomLeftCornerY = area.bottom_left_corner_y;
        info->setArea(corners);
        if (parkMap.find(spaceIndex) == parkMap.end()) {
            info->setSpaceIndex(spaceIndex);
            parkMap[spaceIndex] = info;
            parkingSpaceInfos.push_back(info);
        }
        ++spaceIndex;
    }
}
// True when `newAreas` matches the currently configured areas exactly:
// same count, same order, and all eight corner coordinates equal per pair.
bool CameraHandle::compareLists(const std::list<vides_data::responseArea>& newAreas) {
    // Snapshot the areas currently held by the parking spaces.
    std::list<vides_data::ParkingArea> currentAreas;
    for (ParkingSpaceInfo *space : parkingSpaceInfos) {
        if (space == nullptr) {
            continue;
        }
        currentAreas.push_back(space->getArea());
    }
    if (newAreas.size() != currentAreas.size()) {
        return false;
    }
    auto itNew = newAreas.begin();
    auto itCur = currentAreas.begin();
    for (; itNew != newAreas.end() && itCur != currentAreas.end(); ++itNew, ++itCur) {
        const bool cornersMatch =
            itNew->bottom_right_corner_x == itCur->bottomRightCornerX &&
            itNew->bottom_right_corner_y == itCur->bottomRightCornerY &&
            itNew->bottom_left_corner_x == itCur->bottomLeftCornerX &&
            itNew->bottom_left_corner_y == itCur->bottomLeftCornerY &&
            itNew->top_left_corner_x == itCur->topLeftCornerX &&
            itNew->top_left_corner_y == itCur->topLeftCornerY &&
            itNew->top_right_corner_x == itCur->topRightCornerX &&
            itNew->top_right_corner_y == itCur->topRightCornerY;
        if (!cornersMatch) {
            return false;
        }
    }
    return true;
}
// Discards every existing ParkingSpaceInfo and rebuilds the index map and
// the list from the freshly downloaded areas.
void CameraHandle::updateParkMapAndParkingSpaceInfos(const std::list<vides_data::responseArea>&newAreas){
    Common& common = Common::getInstance();
    // parkingSpaceInfos holds the same pointers as parkMap, so only clear it
    // here; the actual objects are freed through the map below.
    parkingSpaceInfos.clear();
    for (auto& entry : parkMap) {
        common.deleteObj(entry.second);
    }
    parkMap.clear();
    initParkingSpaceInfo(newAreas);
}
#ifndef CAMERAHANDLE_H
#define CAMERAHANDLE_H
#include "RecognitionInfo.h"
#include "FaceRecognition.h"
#include "HttpService.h"
#include "LicensePlateRecognition.h"
#include "Json_Header/AlarmInfo.h"
#include "Json_Header/NetWork_NetCommon.h"
#include "Json_Header/OPTimeSetting.h"
#include "Json_Header/Simplify_Encode.h"
#include "Json_Header/System_TimeZone.h"
#include "Json_Header/RecordCfg.h"
#include "Json_Header/NetWork_SPVMN.h"
#include "mainwindow.h"
#include "ParkingSpaceInfo.h"
#include "so_human_sdk.h"
#include "hyper_lpr_sdk.h"
#include <QPolygon>
#include <QPainterPath>
#include <QProcess>
#include <QDateTime>
#include <QJsonDocument>
#include <memory>
#include <QString>
#include <QObject>
#include <QTimer>
#include <QThreadPool>
#include <QQueue>
#include <opencv2/opencv.hpp>
#include <QSemaphore>
// Classification of a recognized vehicle event for a parking space.
enum CAR_INFORMATION {
Exit, // vehicle leaving the lot (出场)
Mobilization, // vehicle entering the lot (进场)
ExitAndMobilization // one vehicle leaving while another enters
};
// Per-camera controller. Logs in to a device through the XSDK, pulls
// snapshots on a timer, runs license-plate / face / human detection on the
// frames, matches results against the configured parking-space polygons and
// reports them to the HTTP backend. Lives on the Qt event loop (QObject).
class CameraHandle: public QObject {
Q_OBJECT
public:
CameraHandle(QString &url,QString &httpUrl,QString &sSn, int &channel,const QString &modelPaths,
float carConfidence,int imageSave);
CameraHandle();
~CameraHandle();
// Synchronous device login; returns the SDK handle (negative on failure).
int sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
//int SdkMediaGetFaceImage(int hDevice, int nSeq, int nTimeout);
// Subscribes/unsubscribes this handle to device alarm callbacks.
int sdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener);
int getHdevice();
int getChannel();
void clearCameraHandle();
// void rebindTimer(int hDevice);
// Starts the periodic snapshot timer (interval syn_timer) for hDevice.
void initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer);
// Processes one captured frame (recognition + state updates).
void updateImage(const cv::Mat & frame,qint64 currentTime);
// Encodes an OpenCV image as base64 into base64Data.
void matToBase64(const cv::Mat &image, QByteArray &base64Data);
// Alarm-callback entry point; szString carries the SDK's JSON payload.
int callbackFunction(XSDK_HANDLE hObject,QString &szString);
// Debounces plate changes for one space and classifies enter/exit events.
void checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat & frame,RecognizedInfo& newInfo,int &result,std::map<int,RecognizedInfo>&exitAndMoMap);
// Pushes accumulated plate-recognition results to the backend.
void licensePlateRecognitionResults(vides_data::requestLicensePlate &location);
// Grabs one snapshot from the given device channel.
void sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel);
// Set device system time / timezone.
void sdkDevSystemTimeZoneSyn(QString &time);
// Push recording configuration (JSON) to the device.
void sdkRecordCfg(const char * recordJson);
// Push encoder configuration (JSON) to the device.
void sdkEncodeCfg(const char *enCode);
// Update GB28181 (SPVMN) configuration on the device.
void sdkDevSpvMn(const char* spvMn);
// Downloads a recording clip from the device for the given time window.
void sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
QString startTimer,QString endTime);
void setTimeoutMs(int timeoutMs);
// Semaphore-guarded critical-section helper; see timeoutMs.
bool acquireAndReleaseWithTimeout(bool flag);
QString getSSn();
int getMediaHandle();
void setMediaHandle(int mediaHandle);
void setCurrentFace(int currentFace);
// Builds parkMap / parkingSpaceInfos from server-provided areas.
void initParkingSpaceInfo(const std::list<vides_data::responseArea>&areas);
// True when newAreas equals the currently configured areas, in order.
bool compareLists(const std::list<vides_data::responseArea>& newAreas);
// Frees current spaces and rebuilds them from newAreas.
void updateParkMapAndParkingSpaceInfos(const std::list<vides_data::responseArea>&newAreas);
std::map<int, vides_data::responseRecognitionData>&getVideoCurrentData();
std::map<QString, QString>&getCurrentData();
// Whether two parking-space polygons overlap.
bool polygonsOverlap(ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2);
// Intersection area of two polygons.
double calculateIntersectionArea(const QPolygonF &polygon1, const QPolygonF &polygon2);
// Counter-clockwise orientation test for three points (cross product sign).
double ccw(const QPointF& a, const QPointF& b, const QPointF& c);
// JPEG-encodes the most recent frame into buffer.
void getCurrentFrame(std::vector<uchar> &buffer);
// Returns the space index whose polygon best matches prakArea.
int findPointRegion(ParkingSpaceInfo &prakArea);
// Returns the space index with the largest intersection with prakArea.
int determineArea(ParkingSpaceInfo &prakArea);
signals:
void callbackFrameReady(const cv::Mat &frame, const QString &url);
void afterDownloadFile( int id,int recognitionType,QString ossUrl);
private slots:
void sdkRealTimeDevSnapSyn(int hDevice);
void pushRecordToCloud(int id,int recognitionType,QString ossUrl);
void releaseSemaphore();
private :
int hDevice;
int channel;
QString httpUrl;
SXSDKLoginParam *loginParam;
SXMediaFaceImageReq *sxMediaFaceImageReq;
std::mutex plateMutex;
std::mutex faceMutex;
QString sSn;
QString url;
std::map<int, vides_data::responseRecognitionData> videoCurrentData;
std::map<QString, QString> currentData;
// Maps each space index to its parking-space info (owning pointers).
std::map<int,ParkingSpaceInfo*>parkMap;
// All parking-space areas watched by this camera (same pointers as parkMap).
std::vector<ParkingSpaceInfo*>parkingSpaceInfos;
// Current face count.
int currentFace;
int mediaHandle;
// Snapshot timer (fires roughly every 2 s).
QTimer *dev_snap_syn_timer;
QTimer *release_timer;
int offlineCount=0;
TCV_HumanDetector *detector;
P_HLPR_Context ctx ;
QSemaphore semaphore;
int timeoutMs;
int image_save;
};
#endif // CAMERAHANDLE_H
#include "CameraRealThread.h"
#include <QDebug>
#include <QProcess>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
}
// Stores the source (RTSP) and destination (RTMP) URLs and initializes
// FFmpeg's network layer; the actual relay happens in run().
CameraRealThread::CameraRealThread(const QString& rtspUrl, const QString& rtmpUrl) : m_rtspUrl(rtspUrl), m_rtmpUrl(rtmpUrl) {
avformat_network_init();
}
CameraRealThread::~CameraRealThread(){
// NOTE(review): network deinit is left commented out — presumably to avoid
// tearing down FFmpeg networking while other relays still use it; confirm.
// avformat_network_deinit();
}
void CameraRealThread::run(){
// 打开 RTSP 流
QByteArray rtByteArray = m_rtspUrl.toUtf8();
const char* rt_url = rtByteArray.data();
QByteArray rmByteArray = m_rtmpUrl.toUtf8();
const char* rm_url ="flv://rtmp://push.sydip.com/live/44ad6edc63896566?auth_key=1707189971-0-0-dbac3b3c1f29652323355096908b2a62" ;//rmByteArray.data();
qDebug() << "rm_url"<<rm_url;
QProcess process;
process.start("ffmpeg -formats | grep flv");
process.waitForFinished();
// 读取ffmpeg命令的输出结果
QString output = process.readAllStandardOutput();
// 检查输出结果中是否包含FLV格式信息
if (output.contains("FLV (Flash Video)")) {
qDebug() << "FFmpeg支持FLV格式";
} else {
qDebug() << "FFmpeg不支持FLV格式";
}
avformat_network_init();
// 输入(RTSP)和输出(RTMP)的URL
AVFormatContext *input_ctx = nullptr;
AVFormatContext *output_ctx = nullptr;
int ret;
// 打开输入流
if ((ret = avformat_open_input(&input_ctx, rt_url, nullptr, nullptr)) < 0) {
return ;
}
if ((ret = avformat_find_stream_info(input_ctx, nullptr)) < 0) {
return ;
}
char filename[64] = "test.ts";
// 打开输出流
avformat_alloc_output_context2(&output_ctx, nullptr, "mpegts", filename); // FLV格式用于RTMP
if (!output_ctx) {
qDebug()<< "Cannot create output context";
return;
}
// 从输入流复制流信息到输出流
for (int i = 0; i < input_ctx->nb_streams; i++) {
AVStream *in_stream = input_ctx->streams[i];
AVStream *out_stream = avformat_new_stream(output_ctx, nullptr);
if (!out_stream) {
return ;
}
avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar);
out_stream->codecpar->codec_tag = 0;
}
// 打开输出URL
if (!(output_ctx->oformat->flags & AVFMT_NOFILE)) {
if (avio_open(&output_ctx->pb, rm_url, AVIO_FLAG_WRITE) < 0) {
return ;
}
}
// 写输出流的头部信息
if (avformat_write_header(output_ctx, nullptr) < 0) {
return ;
}
// 主循环,从输入读取数据并写入输出
AVPacket pkt;
while (true) {
AVStream *in_stream, *out_stream;
// 获取一个数据包
if (av_read_frame(input_ctx, &pkt) < 0)
break;
in_stream = input_ctx->streams[pkt.stream_index];
out_stream = output_ctx->streams[pkt.stream_index];
// 复制数据包
pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base, (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
pkt.dts = pkt.pts;
pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
pkt.pos = -1;
// 写数据包到输出
if (av_interleaved_write_frame(output_ctx, &pkt) < 0) {
break;
}
av_packet_unref(&pkt);
}
// 写输出流的尾部信息
av_write_trailer(output_ctx);
// 关闭输入和输出
avformat_close_input(&input_ctx);
if (output_ctx && !(output_ctx->oformat->flags & AVFMT_NOFILE))
avio_closep(&output_ctx->pb);
avformat_free_context(output_ctx);
avformat_network_deinit();
}
#ifndef CAMERAREALTHREAD_H
#define CAMERAREALTHREAD_H
#include <QThread>
#include <QString>
// Worker thread that relays an RTSP camera stream to an RTMP/MPEG-TS
// endpoint via FFmpeg remuxing; see run() for the relay loop.
class CameraRealThread : public QThread {
Q_OBJECT
public:
CameraRealThread(const QString& rtspUrl, const QString& rtmpUrl);
~CameraRealThread();
protected:
// QThread entry point: performs the blocking RTSP -> RTMP relay.
void run() override;
private:
QString m_rtspUrl; // source stream URL
QString m_rtmpUrl; // destination stream URL
};
#endif // CAMERAREALTHREAD_H
//#include "CameraThread.h"
//CameraThread::CameraThread(QObject *parent) : QThread(parent),running(true) {
//}
//CameraThread::~CameraThread() {
// // 清理代码,例如停止线程、清除任务队列等
// running.store( false); // 停止线程
// if (isRunning()) {
// wait(); // 等待线程结束
// }
//}
//void CameraThread::addTask(const Task &task) {
// // 将任务添加到队列中
// std::lock_guard<std::mutex> lock(taskMutex);
// taskQueue.push(std::move(task));
// taskCondition.notify_one(); // 通知线程有新任务
//}
//// 重写QThread的run方法
//void CameraThread::run() {
// while (running) {
// std::unique_lock<std::mutex> lock(taskMutex); // 使用unique_lock管理锁
// taskCondition.wait(lock, [this] { return !taskQueue.empty() || !running; });
// if (!taskQueue.empty()) {
// Task task = taskQueue.front();
// taskQueue.pop();
// lock.unlock(); // 解锁,以便在执行任务时其他线程可以操作队列
// task();// 执行任务
// lock.lock(); // 重新加锁
// }
// }
//}
//#ifndef CAMERATHREAD_H
//#define CAMERATHREAD_H
//#include <functional>
//#include "XSDKPublic.h"
//#include "XNetSDKSyn.h"
//#include "XNetSDKDefine.h"
//#include <queue>
//#include <QThread>
//#include <mutex>
//#include <condition_variable>
//#include <atomic>
//using namespace std;
//class CameraThread : public QThread {
// Q_OBJECT
//public:
// CameraThread(QObject *parent = nullptr);
// ~CameraThread();
// typedef function<void()> Task;
// void addTask(const Task &task);
//protected:
// void run() override;
//private:
// std::queue<Task> taskQueue;
// std::mutex taskMutex;
// std::condition_variable taskCondition;
// std::atomic<bool> running{true};
//};
//#endif // CAMERATHREAD_H
#include "Common.h"
Common::Common(){}
// Current local time formatted as "yyyy-MM-dd hh:mm:ss".
QString Common::getTimeString() {
    return QDateTime::currentDateTime().toString("yyyy-MM-dd hh:mm:ss");
}
// Converts Unix-epoch seconds to a local "yyyy-MM-dd hh:mm:ss" string.
QString Common::timestampToDateString(qint64 timestamp) {
    return QDateTime::fromSecsSinceEpoch(timestamp).toString("yyyy-MM-dd hh:mm:ss");
}
// Produces an OSS-style request signature: the canonical string
// VERB \n Content-MD5 \n Content-Type \n Date \n ossHeaders \n resource
// is signed with HMAC-SHA1 (keyed by accessKeySecret) and base64-encoded.
QString Common::generateSignature(const QString& accessKeySecret, const QString& verb,
const QString& contentMD5, const QString& contentType,
const QString& date, const QString& ossHeaders,
const QString& canonicalizedResource)
{
    // Build the canonical string; ossHeaders must not carry trailing blanks.
    const QString stringToSign =
        verb + "\n" +
        contentMD5 + "\n" +
        contentType + "\n" +
        date + "\n" +
        ossHeaders + "\n" +
        canonicalizedResource;
    // HMAC-SHA1 over the UTF-8 bytes, then base64.
    QMessageAuthenticationCode mac(QCryptographicHash::Sha1);
    mac.setKey(accessKeySecret.toUtf8());
    mac.addData(stringToSign.toUtf8());
    return QString(mac.result().toBase64());
}
// --- Directory-path accessors ---------------------------------------------
// Each setter stores the directory with exactly one trailing '/', so callers
// can concatenate file names directly onto the returned value.
QString Common::getVideoOut(){
    return videoOut;
}
void Common::setVideoOut(QString videoOut){
    // BUG FIX: '/' was appended unconditionally, so calling the setter twice
    // (or passing a path already ending in '/') produced "...//".
    if (!videoOut.endsWith("/")) {
        videoOut.append("/");
    }
    this->videoOut=videoOut;
}
QString Common::getVideoDownload(){
    return videoDownload;
}
void Common::setVideoDownload(QString videoDownload){
    // Same normalization as setVideoOut: guarantee a single trailing '/'.
    if (!videoDownload.endsWith("/")) {
        videoDownload.append("/");
    }
    this->videoDownload=videoDownload;
}
QString Common::getImages(){
    return images;
}
void Common::setImages(QString images){
    // Same normalization as setVideoOut: guarantee a single trailing '/'.
    if (!images.endsWith("/")) {
        images.append("/");
    }
    this->images=images;
}
// Returns the first non-loopback IPv4 address of this host, or the
// loopback address ("127.0.0.1") when none is available.
QString Common::GetLocalIp() {
    const QList<QHostAddress> addresses = QNetworkInterface::allAddresses();
    for (const QHostAddress& addr : addresses) {
        // toIPv4Address() is non-zero only for genuine IPv4 addresses.
        if (addr != QHostAddress::LocalHost && addr.toIPv4Address()) {
            return addr.toString();
        }
    }
    return QHostAddress(QHostAddress::LocalHost).toString();
}
Common::~Common(){}
#ifndef COMMON_H
#define COMMON_H
#include <ctime>
#include <chrono>
#include <thread>
#include <QString>
#include <cstring>
#include <QDateTime>
#include <QByteArray>
#include <QCryptographicHash>
#include <QMessageAuthenticationCode>
#include <QNetworkInterface>
#include <QDebug>
// Process-wide singleton holding shared output directories and small
// utilities: time formatting, OSS request signing, local-IP lookup and a
// typed delete helper.
class Common
{
public:
Common(const Common &)=delete;
void operator=(const Common &)=delete;
// Meyers singleton accessor (thread-safe initialization since C++11).
static Common &getInstance(){
static Common instance;
return instance;
}
// Current local time as "yyyy-MM-dd hh:mm:ss".
QString getTimeString();
// Unix-epoch seconds -> "yyyy-MM-dd hh:mm:ss" (local time).
QString timestampToDateString(qint64 timestamp);
// HMAC-SHA1/base64 signature over the OSS canonical request string.
QString generateSignature(const QString& accessKeySecret, const QString& verb,
const QString& contentMD5, const QString& contentType,
const QString& date, const QString& ossHeaders,
const QString& canonicalizedResource);
// Directory accessors; setters normalize the stored path to end with '/'.
QString getVideoOut();
void setVideoOut(QString videoOut);
QString getVideoDownload();
void setVideoDownload(QString videoDownload);
QString getImages();
void setImages(QString images);
// First non-loopback IPv4 address, or loopback when none exists.
QString GetLocalIp();
// Deletes obj and resets the caller's pointer to nullptr; no-op on null.
template<typename T>
void deleteObj(T*& obj) {
if(obj != nullptr) {
delete obj;
obj = nullptr;
}
}
private:
QString videoOut; // composed-video output directory (trailing '/')
QString videoDownload; // device-recording download directory (trailing '/')
QString images; // snapshot-image directory (trailing '/')
Common();
~Common();
};
#endif // COMMON_H
//#include "FaceDetectionParkingPush.h"
//FaceDetectionParkingPush::FaceDetectionParkingPush(){}
//FaceDetectionParkingPush::FaceDetectionParkingPush(QString &url, QString &httpUrl, QString &sSn, QTimer* devSnapSynTimer, int &channel)
// : hDevice(-1),
// currentConStatus(false),
// loginParam(new SXSDKLoginParam()),
// sxMediaFaceImageReq(new SXMediaFaceImageReq()),
// channel(channel),
// dev_snap_syn_timer(new QTimer(this)) {
//}
//FaceDetectionParkingPush::~FaceDetectionParkingPush() {
// Common & instace= Common::getInstance();
// instace.deleteObj(dev_snap_syn_timer);
// instace.deleteObj(loginParam);
// instace.deleteObj(sxMediaFaceImageReq);
// QThreadPool::globalInstance()->waitForDone();
//}
//int FaceDetectionParkingPush::SdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout) {
// QByteArray byteArray = sDevId.toUtf8();
// char* cDevid=byteArray.data();
// strcpy(loginParam->sDevId, cDevid);
// loginParam->nDevPort=nDevPort;
// QByteArray byteName = sUserName.toUtf8();
// char* cName=byteName.data();
// strcpy(loginParam->sUserName, cName);
// if(sPassword.length()>0){
// QByteArray bytePassword = sPassword.toUtf8();
// strcpy(loginParam->sPassword, bytePassword.constData());
// }else{
// strcpy(loginParam->sPassword, "");
// }
// loginParam->nCnnType=EDEV_CNN_TYPE_AUTO;
// int loginResult =XSDK_DevLoginSyn(loginParam,nTimeout);
// if(loginResult<0){
// qInfo() << "登录设备失败";
// return loginResult;
// }
// return loginResult;
//}
//int FaceDetectionParkingPush::SdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener) {
// return XSDK_DevSetAlarmListener(hDevice,bListener);
//}
//int FaceDetectionParkingPush::getChannel(){
// return channel;
//}
//int FaceDetectionParkingPush::getHdevice() {
// return hDevice;
//}
//bool FaceDetectionParkingPush::getConnectionStatus() {
// return currentConStatus;
//}
//void FaceDetectionParkingPush::initSdkRealTimeDevSnapSyn(int hDevice) {
// connect(dev_snap_syn_timer, &QTimer::timeout, this, [this,hDevice]() {
// this->SdkRealTimeDevSnapSyn(hDevice);
// }, Qt::QueuedConnection);
// dev_snap_syn_timer->start(2000);
//}
//void FaceDetectionParkingPush::SdkRealTimeDevSnapSyn(int hDevice) {
// QThreadPool* threadPool = QThreadPool::globalInstance();
// threadPool->setMaxThreadCount(6);
// if (!task) { // 检查是否已经创建了TaskRunnable
// task = new TaskRunnable(this, hDevice, this->channel,TaskRunnable::SdkDevSnapSyn);
// task->setAutoDelete(false); // 确保task不会在执行后被自动删除
// }else {
// task->setHdevice(hDevice);
// task->setChannel(this->channel);
// task->setCallbackFunction(TaskRunnable::SdkDevSnapSyn);
// }
// if (!threadPool->tryStart(task)) { // 尝试启动任务,如果线程池满了则不会启动
// qDebug() << "线程池已满,无法启动TaskRunnable";
// }
//}
//TaskRunnable*FaceDetectionParkingPush::getTask(){
// return task;
//}
//void FaceDetectionParkingPush::setTask(TaskRunnable* newTask){
// this->task=newTask;
//}
//int FaceDetectionParkingPush::CallbackFunction(XSDK_HANDLE hObject, QString &szString) {
// QByteArray && byJson = szString.toLocal8Bit();
// const char * cJson= byJson.data();
// XSDK_CFG::AlarmInfo alarmInfo;
// if (0 == alarmInfo.Parse(cJson))
// {
// const char* buf = alarmInfo.Event.ToString();
// qInfo() << "buf:"<<buf;
// qInfo() << "OnDevAlarmCallback[Dev:" << hObject << "]"
// << "\r\nEvent:" << alarmInfo.Event.Value()
// << "\r\nChannel:" << alarmInfo.Channel.Value()
// << "\r\nStartTime:" << alarmInfo.StartTime.Value()
// << "\r\nStatus:" << alarmInfo.Status.Value();
// }
// else
// {
// qDebug() << "OnDevAlarmCallback[Dev:" << hObject << "][Event:" << szString << "]";
// }
// cv::Mat image;
// MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
// mediaFaceImage->FaceImageCallBack(hObject,sxMediaFaceImageReq->nChannel,image);
// if (image.empty())
// {
// qInfo() << "Failed to read the image";
// return -1;
// }
// this->updateImage(image, url);
//}
//void FaceDetectionParkingPush::updateImage(const cv::Mat & frame,const QString &respUrl){
// FaceReconition &faceRecognition = FaceReconition::getInstance();
// LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance();
// //faceRecognition.search(frame,imageHandleList,names);
// if(faceRecognition.doesItExistEmployee(frame)){
// }
// QString lpNumber;
// licensePlateRecogn.licensePlateNumber(frame, lpNumber);
// std::lock_guard<std::mutex> guard(queueMutex);
// if (queuels.size() >= 10) {
// queuels.dequeue();
// }
// RecognizedInfo recognizedInfo(lpNumber,QDateTime::currentMSecsSinceEpoch());
// queuels.enqueue(recognizedInfo);
// this->CheckAndUpdateCurrentPlate(recognizedInfo);
// if(lpNumber.length()>0){
// qDebug()<<QString("识别的车牌号是:%1").arg(lpNumber);
// }else {
// qDebug()<<QString("当前这帧图像未识别车牌");
// }
//}
//void FaceDetectionParkingPush::CheckAndUpdateCurrentPlate( RecognizedInfo& newInfo){
// std::lock_guard<std::mutex> guard(plateMutex);
// if (newInfo.getLicensePlate() != currentPlate.getLicensePlate()) {
// int count = 0;
// for (auto& info : queuels) {
// if (info.getLicensePlate() == newInfo.getLicensePlate()) {
// count++;
// }
// }
// //第一次进场 当前车牌就是进来这个,老车牌就是空
// //出场的时候 当前车牌是空, 老车牌是出厂车牌
// if (count >= 3) {
// if(currentPlate.getLicensePlate().length()<=0){
// qInfo()<<"未出场车:"<<currentPlate.getLicensePlate()<<"进场的车牌号:"<<newInfo.getLicensePlate();
// }else {
// qInfo()<<"出场车牌号:"<<currentPlate.getLicensePlate()<<"进场的车牌号:"<<newInfo.getLicensePlate();
// }
// XSDK_HANDLE h_device=getHdevice();
// QMetaObject::invokeMethod(dev_snap_syn_timer, "stop", Qt::QueuedConnection);
// QObject::disconnect(dev_snap_syn_timer, &QTimer::timeout, MainWindow::sp_this, nullptr);
// QObject::connect(dev_snap_syn_timer, &QTimer::timeout, MainWindow::sp_this, [this, h_device]() {
// this->SdkRealTimeDevSnapSyn(h_device);
// }, Qt::QueuedConnection);
// QMetaObject::invokeMethod(dev_snap_syn_timer, "start", Qt::QueuedConnection, Q_ARG(int, 15000));
// currentPlate = newInfo;
// }
// }
//}
//void FaceDetectionParkingPush::SdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
// if(hDevice<=0){
// qInfo() << "相机断线";
// return;
// }
// cv::Mat image;
// MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
// int ret=mediaFaceImage->FaceImageCallBack(hDevice,nChannel, image);
// Common & instace= Common::getInstance();
// vides_data::requestDeviceStatus reStatus;
// reStatus.sSn=sSn;
// if(ret>0){
// if(!currentConStatus){
// reStatus.status=1;
// reStatus.type=2;
// HttpService httpService(httpUrl);
// vides_data::response*re= httpService.httpPostDeviceStatus(reStatus);
// if(re->code!=0){
// qInfo()<<"请求设备状态失败";
// }
// instace.deleteObj(re);
// }
// currentConStatus=true;
// }else{
// if(currentConStatus){
// reStatus.status=0;
// reStatus.type=2;
// HttpService httpService(httpUrl);
// vides_data::response*re= httpService.httpPostDeviceStatus(reStatus);
// if(re->code!=0){
// qInfo()<<"请求设备状态失败";
// }
// instace.deleteObj(re);
// }
// currentConStatus=false;
// }
// if (image.empty())
// {
// qInfo() << "Failed to read the image";
// return;
// }
// this->updateImage(image, url);
//}
//#ifndef FACEDETECTIONPARKINGPUSH_H
//#define FACEDETECTIONPARKINGPUSH_H
//#include "RecognitionInfo.h"
//#include "FaceRecognition.h"
//#include "HttpService.h"
//#include "LicensePlateRecognition.h"
//#include "Json_Header/AlarmInfo.h"
//#include "TaskRunnable.h"
//#include "mainwindow.h"
//#include <QDateTime>
//#include <QJsonDocument>
//#include "Common.h"
//#include <QString>
//#include <QObject>
//#include <QTimer>
//#include <QThreadPool>
//#include <QQueue>
//#include <opencv2/opencv.hpp>
//class FaceDetectionParkingPush : public QObject {
// Q_OBJECT
//public:
// FaceDetectionParkingPush(QString &url,QString &httpUrl,QString &sSn,QTimer* devSnapSynTimer, int &channel);
// ~FaceDetectionParkingPush();
// FaceDetectionParkingPush();
// int SdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
// //int SdkMediaGetFaceImage(int hDevice, int nSeq, int nTimeout);
// int SdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener);
// int getHdevice();
// int getChannel();
// bool getConnectionStatus();
// void initSdkRealTimeDevSnapSyn(int hDevice);
// void updateImage(const cv::Mat & frame,const QString &respUrl);
// int CallbackFunction(XSDK_HANDLE hObject,QString &szString);
// void CheckAndUpdateCurrentPlate( RecognizedInfo& newInfo);
// void SdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel);
// TaskRunnable*getTask();
// void setTask(TaskRunnable* newTask);
//signals:
// void CallbackFrameReady(const cv::Mat &frame, const QString &url);
//public slots:
// void SdkRealTimeDevSnapSyn(int hDevice);
//private:
// TaskRunnable* task = nullptr;
// int hDevice;
// int channel;
// SXSDKLoginParam *loginParam;
// SXMediaFaceImageReq *sxMediaFaceImageReq;
// std::mutex queueMutex;
// std::mutex plateMutex;
// std::mutex taskThread;
// QQueue<RecognizedInfo> queuels;
// RecognizedInfo currentPlate;
// QString sSn;
// QString url;
// volatile bool currentConStatus=false;
// //2秒钟抓一次图
// QTimer *dev_snap_syn_timer;
//};
//#endif // FACEDETECTIONPARKINGPUSH_H
//#include "FaceDetectionParkingPush.h"
//#include "RecognitionInfo.h"
//#include "XSDKPublic.h"
//#include "Common.h"
//#include "MediaFaceImage.h"
//#include "Json_Header/AlarmInfo.h"
//#include "FaceRecognition.h"
//#include "LicensePlateRecognition.h"
//#include "mainwindow.h"
//#include "HttpService.h"
//#include <string.h>
//#include <functional>
//#include <QQueue>
//#include <QtCore/QThread>
//#include <QJsonDocument>
//class FaceDetectionParkingPushImpl {
//public:
// FaceDetectionParkingPushImpl(FaceDetectionParkingPush* parent,QString &url,QTimer* devSnapSynTimer,QString &httpUrl,QString &sSn);
// ~FaceDetectionParkingPushImpl();
// int SdkInit(QString &szConfigPath, QString &szTempPath,int channel);
// XSDK_HANDLE SdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
// XSDK_HANDLE SdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener);
// int CallbackFunction(XSDK_HANDLE hObject,QString &szString);
// void updateImage(const cv::Mat & frame,const QString &respUrl);
// void SdkDevSnapSyn(XSDK_HANDLE hDevice,int nChannel);
// void CheckAndUpdateCurrentPlate(RecognizedInfo& newInfo);
// CameraThread *getCameraThread();
// bool findInitAndCurrentStatus( bool initConStatus,bool currentConStatus);
//private:
// FaceDetectionParkingPush* parent;
// SXSDKLoginParam *loginParam;
// SXMediaFaceImageReq *sxMediaFaceImageReq;
// std::mutex queueMutex;
// std::mutex plateMutex;
// QQueue<RecognizedInfo> queuels;
// RecognizedInfo currentPlate;
// QString sSn;
// QString url;
// QString httpUrl;
// int channel;
// QTimer* devSnapSynTimer;
// volatile bool currentConStatus=false;
// XSDK_HANDLE hDevice;
//};
//void FaceDetectionParkingPushImpl::CheckAndUpdateCurrentPlate( RecognizedInfo& newInfo){
// std::lock_guard<std::mutex> guard(plateMutex);
// if (newInfo.getLicensePlate() != currentPlate.getLicensePlate()) {
// int count = 0;
// for (auto& info : queuels) {
// if (info.getLicensePlate() == newInfo.getLicensePlate()) {
// count++;
// }
// }
// //第一次进场 当前车牌就是进来这个,老车牌就是空
// //出场的时候 当前车牌是空, 老车牌是出厂车牌
// if (count >= 3) {
// if(currentPlate.getLicensePlate().length()<=0){
// qInfo()<<"未出场车:"<<currentPlate.getLicensePlate()<<"进场的车牌号:"<<newInfo.getLicensePlate();
// }else {
// qInfo()<<"出场车牌号:"<<currentPlate.getLicensePlate()<<"进场的车牌号:"<<newInfo.getLicensePlate();
// }
// XSDK_HANDLE h_device=parent->getHdevice();
// QMetaObject::invokeMethod(devSnapSynTimer, "stop", Qt::QueuedConnection);
// QObject::disconnect(devSnapSynTimer, &QTimer::timeout, MainWindow::sp_this, nullptr);
// QObject::connect(devSnapSynTimer, &QTimer::timeout, MainWindow::sp_this, [this, h_device]() {
// this->parent->SdkRealTimeDevSnapSyn(h_device);
// }, Qt::QueuedConnection);
// QMetaObject::invokeMethod(devSnapSynTimer, "start", Qt::QueuedConnection, Q_ARG(int, 15000));
// currentPlate = newInfo;
// }
// }
//}
//void FaceDetectionParkingPushImpl::SdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
// if(hDevice<=0){
// qInfo() << "相机断线";
// return;
// }
// cv::Mat image;
// MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
// int ret=mediaFaceImage->FaceImageCallBack(hDevice,nChannel, image);
// Common & instace= Common::getInstance();
// vides_data::requestDeviceStatus reStatus;
// reStatus.sSn=sSn;
// if(ret>0){
// if(!currentConStatus){
// reStatus.status=1;
// reStatus.type=2;
// HttpService httpService(httpUrl);
// vides_data::response*re= httpService.httpPostDeviceStatus(reStatus);
// if(re->code!=0){
// qInfo()<<"请求设备状态失败";
// }
// instace.deleteObj(re);
// }
// currentConStatus=true;
// }else{
// if(currentConStatus){
// reStatus.status=0;
// reStatus.type=2;
// HttpService httpService(httpUrl);
// vides_data::response*re= httpService.httpPostDeviceStatus(reStatus);
// if(re->code!=0){
// qInfo()<<"请求设备状态失败";
// }
// instace.deleteObj(re);
// }
// currentConStatus=false;
// }
// if (image.empty())
// {
// qInfo() << "Failed to read the image";
// return;
// }
// this->updateImage(image, url);
//}
//int XNetSDK_Media_CallBack(XSDK_HANDLE hMedia, int nDataType, int nDataLen, int nParam2, int nParam3, const char* szString, void* pData, int64 pDataInfo, int nSeq, void* pUserData, void* pMsg)
//{
// if (EXSDK_DATA_FORMATE_FRAME == nDataType)
// {
// }
// return 0;
//}
//FaceDetectionParkingPushImpl::FaceDetectionParkingPushImpl(FaceDetectionParkingPush* parent, QString &url,QTimer* devSnapSynTimer,QString &httpUrl,QString &sSn)
// :parent(parent),
// loginParam(new SXSDKLoginParam()),
// sxMediaFaceImageReq(new SXMediaFaceImageReq()),
// url(url),
// devSnapSynTimer(devSnapSynTimer),
// httpUrl(httpUrl),
// sSn(sSn)
//{
//}
//XSDK_HANDLE FaceDetectionParkingPushImpl::SdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout) {
// QByteArray byteArray = sDevId.toUtf8();
// char* cDevid=byteArray.data();
// strcpy(loginParam->sDevId, cDevid);
// loginParam->nDevPort=nDevPort;
// QByteArray byteName = sUserName.toUtf8();
// char* cName=byteName.data();
// strcpy(loginParam->sUserName, cName);
// if(sPassword.length()>0){
// QByteArray bytePassword = sPassword.toUtf8();
// strcpy(loginParam->sPassword, bytePassword.constData());
// }else{
// strcpy(loginParam->sPassword, "");
// }
// loginParam->nCnnType=EDEV_CNN_TYPE_AUTO;
// int loginResult =XSDK_DevLoginSyn(loginParam,nTimeout);
// if(loginResult<0){
// qInfo() << "登录设备失败";
// return loginResult;
// }
// return loginResult;
//}
//void FaceDetectionParkingPushImpl::updateImage(const cv::Mat & frame,const QString &respUrl){
// FaceReconition &faceRecognition = FaceReconition::getInstance();
// LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance();
// //faceRecognition.search(frame,imageHandleList,names);
// if(faceRecognition.doesItExistEmployee(frame)){
// }
// QString lpNumber;
// licensePlateRecogn.licensePlateNumber(frame, lpNumber);
// std::lock_guard<std::mutex> guard(queueMutex);
// if (queuels.size() >= 10) {
// queuels.dequeue();
// }
// RecognizedInfo recognizedInfo(lpNumber,QDateTime::currentMSecsSinceEpoch());
// queuels.enqueue(recognizedInfo);
// this->CheckAndUpdateCurrentPlate(recognizedInfo);
// if(lpNumber.length()>0){
// qDebug()<<QString("识别的车牌号是:%1").arg(lpNumber);
// }else {
// qDebug()<<QString("当前这帧图像未识别车牌");
// }
//}
//XSDK_HANDLE FaceDetectionParkingPushImpl::SdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener) {
// this->hDevice=hDevice;
// return XSDK_DevSetAlarmListener(hDevice,bListener);
//}
//int FaceDetectionParkingPushImpl::CallbackFunction(XSDK_HANDLE hObject,QString &szString) {
// QByteArray && byJson = szString.toLocal8Bit();
// const char * cJson= byJson.data();
// XSDK_CFG::AlarmInfo alarmInfo;
// if (0 == alarmInfo.Parse(cJson))
// {
// const char* buf = alarmInfo.Event.ToString();
// qInfo() << "buf:"<<buf;
// qInfo() << "OnDevAlarmCallback[Dev:" << hObject << "]"
// << "\r\nEvent:" << alarmInfo.Event.Value()
// << "\r\nChannel:" << alarmInfo.Channel.Value()
// << "\r\nStartTime:" << alarmInfo.StartTime.Value()
// << "\r\nStatus:" << alarmInfo.Status.Value();
// }
// else
// {
// qDebug() << "OnDevAlarmCallback[Dev:" << hObject << "][Event:" << szString << "]";
// }
// cv::Mat image;
// MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
// mediaFaceImage->FaceImageCallBack(hObject,sxMediaFaceImageReq->nChannel,image);
// if (image.empty())
// {
// qInfo() << "Failed to read the image";
// return -1;
// }
// this->updateImage(image, url);
//}
//FaceDetectionParkingPushImpl::~FaceDetectionParkingPushImpl() {
// Common & instace= Common::getInstance();
// instace.deleteObj(loginParam);
// instace.deleteObj(sxMediaFaceImageReq);
// XSDK_DevLogout(this->hDevice);
//}
//FaceDetectionParkingPush::FaceDetectionParkingPush(QString &url,QString &httpUrl,QString &sSn,QTimer* devSnapSynTimer, int &channel)
// : m_pImpl(new FaceDetectionParkingPushImpl(this, url,devSnapSynTimer,httpUrl,sSn)),
// channel(channel) {
// currentThread=new CameraThread(this);
// dev_snap_syn_timer=new QTimer(this);
//}
//FaceDetectionParkingPush::~FaceDetectionParkingPush() {
// Common & instace= Common::getInstance();
// instace.deleteObj(currentThread);
// delete m_pImpl;
//}
//int FaceDetectionParkingPush::SdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout) {
// return m_pImpl->SdkDevLoginSyn(sDevId, nDevPort, sUserName, sPassword, nTimeout);
//}
//XSDK_HANDLE FaceDetectionParkingPush::SdkDevSetAlarmListener(XSDK_HANDLE hDevice,int bListener) {
// this->hDevice=hDevice;
// return m_pImpl->SdkDevSetAlarmListener(hDevice, bListener);
//}
//void FaceDetectionParkingPush::initSdkRealTimeDevSnapSyn(int hDevice){
// connect(dev_snap_syn_timer, &QTimer::timeout, this, [this,hDevice]() {
// this->SdkRealTimeDevSnapSyn(hDevice);
// }, Qt::QueuedConnection);
// dev_snap_syn_timer->start(2000);
// }
//void FaceDetectionParkingPush::SdkRealTimeDevSnapSyn(int hDevice){
// auto task =std::bind(&FaceDetectionParkingPushImpl::SdkDevSnapSyn, m_pImpl, hDevice, this->channel);
// this->getCameraThread()->addTask(task);
// this->getCameraThread()->start();
// qDebug() << "SdkRealTimeDevSnapSyn===========";
//}
//int FaceDetectionParkingPush::getHdevice(){
// return hDevice;
//}
//int FaceDetectionParkingPush::CallbackFunction(int hObject,QString &szString) {
// return m_pImpl->CallbackFunction(hObject,szString);
//}
//CameraThread *FaceDetectionParkingPush::getCameraThread(){
// return currentThread;
//}
//FaceDetectionParkingPushImpl *FaceDetectionParkingPush::getImpl(){
// return m_pImpl;
//}
//#ifndef FACEDETECTIONPARKINGPUSHIMPL_H
//#define FACEDETECTIONPARKINGPUSHIMPL_H
//#include "XSDKPublic.h"
//#include "FaceDetectionParkingPush.h"
//#include "XNetSDKDefine.h"
//#include "Common.h"
//#include "CameraThread.h"
//#include "MediaFaceImage.h"
//class FaceDetectionParkingPushImpl {
//public:
// FaceDetectionParkingPushImpl(FaceDetectionParkingPush* parent,QString &framePath, QString &url);
// int SdkInit(QString &szConfigPath, QString &szTempPath);
// XSDK_HANDLE SdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
// XSDK_HANDLE SdkMediaGetFaceImage(XSDK_HANDLE hDevice, int nSeq, int nTimeout);
// int callbackFunction(XSDK_HANDLE hObject, int nMsgId, int nParam1, int nParam2, int nParam3, const char* szString, void* pObject, int64 lParam, int nSeq, void* pUserData, void* pMsg);
// CameraThread *getCameraThread();
//private:
// SXSDKInitParam *pParam;
// SXSDKLoginParam *loginParam;
// SXMediaFaceImageReq *sxMediaFaceImageReq;
// CameraThread *cameraThread;
// QString framePath;
// QString url;
// FaceDetectionParkingPush* parent;
//};
//#endif // FACEDETECTIONPARKINGPUSHIMPL_H
#ifndef FACERECOGNITION_H
#define FACERECOGNITION_H
#include "hyperface.h"
#include <opencv2/opencv.hpp>
#include<QCoreApplication>
#include "herror.h"
#include "LogHandle.h"
#include "VidesData.h"
#include <mutex>
// Singleton wrapper around the HyperFace SDK: owns one face-recognition
// context, the registered feature gallery, and the match-confidence
// threshold used by doesItExistEmployee().
class FaceReconition
{
private:
    static FaceReconition* instance;  // legacy raw pointer; getInstance() does not use it
    HContextHandle ctxHandle;         // SDK context, created in initSourceImageMap()
    float configConfidence;           // minimum confidence for a positive match
    std::vector<int32_t>customIds;    // ids of features currently in the gallery
    FaceReconition();
    ~FaceReconition();
public:
    // Meyers singleton: the function-local static is thread-safe since C++11.
    static FaceReconition& getInstance()
    {
        static FaceReconition instance;
        return instance;
    }
    // Detects faces in `source` and appends every match above the configured
    // confidence threshold to `face`.
    void doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face);
    // Rebuilds the feature gallery from (tag -> image path) entries and stores
    // the new confidence threshold.
    void initSourceImageMap(std::map<QString,QString>&maps,float confidence);
    // Removes all registered features and releases the SDK context.
    int featureRemove();
};
#endif // FACERECOGNITION_H
#include "FaceRecognition.h"
// Initialize all members explicitly. The previous empty body left
// `ctxHandle` indeterminate, so the destructor's `ctxHandle != nullptr`
// guard read an uninitialized pointer (undefined behaviour) whenever
// initSourceImageMap() had never been called.
FaceReconition::FaceReconition() : ctxHandle(nullptr), configConfidence(0.0f) {}
// Releases the SDK context exactly once; a null handle makes destruction a
// no-op so tearing down an uninitialized singleton is safe.
FaceReconition::~FaceReconition(){
    if (ctxHandle == nullptr) {
        return;
    }
    HF_ReleaseFaceContext(ctxHandle);
    ctxHandle = nullptr;
}
// Legacy raw singleton pointer. getInstance() relies on a function-local
// static instead; this definition only satisfies the class declaration.
FaceReconition* FaceReconition::instance = nullptr;
void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,float confidence){
featureRemove();
HResult ret;
// 初始化context
#if defined(__arm__) || defined(__ARM_ARCH)
// ARM 平台相关的代码
QString bPath = QCoreApplication::applicationDirPath() + "/model_zip/N1su_5";
#elif defined(__i386__) || defined(__x86_64__)
QString bPath = QCoreApplication::applicationDirPath() + "/model_zip/T1_5";
#else
#error "不支持的架构"
#endif
QByteArray && bypath = bPath.toUtf8();
char* spath = bypath.data();
HString path = spath;
HInt32 option = HF_ENABLE_QUALITY | HF_ENABLE_FACE_RECOGNITION | HF_ENABLE_MASK_DETECT;
HF_DetectMode detMode = HF_DETECT_MODE_IMAGE; // 选择图像模式 即总是检测
// 创建ctx
ret = HF_CreateFaceContextFromResourceFileOptional(path, option, detMode, 5, &ctxHandle);
if (ret != HSUCCEED) {
qInfo() << QString("Create ctx error: %1").arg(ret);
return;
}
// ret = HF_FaceRecognitionThresholdSetting(ctxHandle, 0.36);
// if (ret != HSUCCEED) {
// qInfo() << QString("HF_FaceRecognitionThresholdSetting error: %1").arg(ret);
// return;
// }
customIds.clear();
int i = 0;
for (auto it = maps.begin(); it != maps.end(); ++it,++i) {
const QString& key = it->first;
const QString& value = it->second;
cv::Mat image = cv::imread(value.toStdString());
if (image.empty()) {
qInfo()<< "错误:图像为空或路径不正确,无法加载图像 ";
return;
}
HF_ImageData imageData = {0};
imageData.data = image.data;
imageData.height = image.rows;
imageData.width = image.cols;
imageData.rotation = VIEW_ROTATION_0;
imageData.format = FORMAT_BGR;
HImageHandle imageSteamHandle;
ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
this->configConfidence=confidence;
if (ret != HSUCCEED) {
qInfo() << QString("image handle error: %1").arg((long)imageSteamHandle);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
HF_MultipleFaceData multipleFaceData = {0};
HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
if (multipleFaceData.detectedNum <= 0) {
qInfo() << QString("initSourceImageMap:未检测到人脸: %1").arg(key);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
HF_FaceFeature feature = {0};
ret = HF_FaceFeatureExtract(ctxHandle, imageSteamHandle, multipleFaceData.tokens[0], &feature);
if (ret != HSUCCEED) {
qInfo() << QString("特征提取出错: %1").arg(ret);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
char* tagName = new char[key.size() + 1];
std::strcpy(tagName, key.toStdString().c_str());
HF_FaceFeatureIdentity identity = {0};
identity.feature = &feature;
identity.customId = i;
customIds.push_back( identity.customId);
identity.tag = tagName;
ret = HF_FeaturesGroupInsertFeature(ctxHandle, identity);
if (ret != HSUCCEED) {
qInfo() << QString("插入失败: %1").arg(ret);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
delete[] tagName;
ret = HF_ReleaseImageStream(imageSteamHandle);
if (ret == HSUCCEED) {
imageSteamHandle = nullptr;
qInfo() << QString("mage released");
} else {
qInfo() << QString("image release error: %l").arg(ret);
}
}
}
int FaceReconition::featureRemove(){
if(customIds.size()>0){
for(auto customId:customIds){
HResult ret= HF_FeaturesGroupFeatureRemove(ctxHandle,customId);
qDebug()<<"ret:featureRemove "<<ret;
}
HF_ReleaseFaceContext(ctxHandle);
}
}
// Runs detection, feature search and the attribute pipeline over `source`,
// appending one faceRecognitionResult per face whose match confidence
// exceeds the configured threshold.
//
// Fixes vs. the previous version:
//  * `imageSteamHandle` was leaked on every early-return path after
//    creation — it is now released before each return;
//  * QString::arg was called with the invalid float format 'Q'
//    (valid chars are 'e','E','f','g','G') — now 'f';
//  * the feature loop copied each feature vector — now iterates by reference.
void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
    HResult ret;
    HF_ContextCustomParameter parameter = {0};
    HF_ImageData imageData = {0};
    imageData.data = source.data;
    imageData.height = source.rows;
    imageData.width = source.cols;
    imageData.rotation = VIEW_ROTATION_0;
    imageData.format = FORMAT_BGR;
    HImageHandle imageSteamHandle;
    ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
    if (ret != HSUCCEED) {
        qInfo()<<QString("image handle error:%1").arg((long) imageSteamHandle,0,10);
        return ;
    }
    HF_MultipleFaceData multipleFaceData = {0};
    HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
    if (multipleFaceData.detectedNum <= 0) {
        qDebug()<<QString("search 未检测到人脸");
        HF_ReleaseImageStream(imageSteamHandle); // was leaked here before
        return ;
    }
    // Copy-style extraction is the SDK-recommended way to obtain feature
    // vectors for the search side.
    std::vector<std::vector<float>> features;
    HInt32 featureNum;
    HF_GetFeatureLength(ctxHandle, &featureNum);
    for (int j = 0; j < multipleFaceData.detectedNum; ++j) {
        std::vector<float> newfeature(featureNum, 0.0f);
        ret = HF_FaceFeatureExtractCpy(ctxHandle, imageSteamHandle, multipleFaceData.tokens[j], newfeature.data());
        if (ret != HSUCCEED) {
            qDebug()<<QString("特征提取出错: %1").arg(ret);
            HF_ReleaseImageStream(imageSteamHandle);
            return ;
        }
        features.push_back(std::move(newfeature));
    }
    int rect = 0;
    for (auto &feat : features) { // by reference: no per-iteration vector copy
        HF_FaceFeature feature;
        feature.size = feat.size();
        feature.data = feat.data();
        HF_FaceFeatureIdentity searchIdentity = {0};
        HFloat confidence;
        ret = HF_FeaturesGroupFeatureSearch(ctxHandle, feature, &confidence, &searchIdentity);
        if (ret != HSUCCEED) {
            qInfo()<<QString("搜索失败: %1").arg(ret);
            HF_ReleaseImageStream(imageSteamHandle);
            return ;
        }
        qDebug()<<QString("搜索置信度: %1").arg(confidence);
        qDebug()<<QString("匹配到的tag: %1").arg(searchIdentity.tag);
        qDebug()<<QString("匹配到的customId: %1").arg(searchIdentity.customId);
        // Face pipeline: computes liveness / mask attributes for this frame.
        ret = HF_MultipleFacePipelineProcess(ctxHandle, imageSteamHandle, &multipleFaceData, parameter);
        if (ret != HSUCCEED) {
            qInfo()<<QString("pipeline执行失败: %1").arg(ret);
            HF_ReleaseImageStream(imageSteamHandle);
            return ;
        }
        HF_RGBLivenessConfidence livenessConfidence = {0};
        ret = HF_GetRGBLivenessConfidence(ctxHandle, &livenessConfidence);
        if (ret != HSUCCEED) {
            qInfo()<<QString("获取活体数据失败1");
            HF_ReleaseImageStream(imageSteamHandle);
            return ;
        }
        // 'f' is the correct fixed-point format specifier for QString::arg.
        qDebug()<<QString("活体置信度====>:%1").arg(livenessConfidence.confidence[0],0,'f',4);
        HF_FaceMaskConfidence maskConfidence = {0};
        ret = HF_GetFaceMaskConfidence(ctxHandle, &maskConfidence);
        if (ret != HSUCCEED) {
            qInfo()<<QString("获口罩数据失败");
            HF_ReleaseImageStream(imageSteamHandle);
            return ;
        }
        HInt32 faceNum;
        ret = HF_FeatureGroupGetCount(ctxHandle, &faceNum);
        if (ret != HSUCCEED) {
            qInfo()<<QString("获取失败");
            HF_ReleaseImageStream(imageSteamHandle);
            return ;
        }
        if (confidence > configConfidence) {
            vides_data::faceRecognitionResult newface;
            newface.id = searchIdentity.tag;
            newface.x = multipleFaceData.rects[rect].x;
            newface.y = multipleFaceData.rects[rect].y;
            newface.width = multipleFaceData.rects[rect].width;
            newface.height = multipleFaceData.rects[rect].height;
            faces.push_back(newface);
        }
        rect++;
    }
    ret = HF_ReleaseImageStream(imageSteamHandle);
    if (ret == HSUCCEED) {
        imageSteamHandle = nullptr;
    } else {
        qInfo()<<QString("image release error: %1").arg(ret);
    }
}
#include "HandleRtsp.h"
// Constructs the RTSP worker thread. The sampling interval defaults to 63
// (emit every 63rd frame).
//
// Fix vs. the previous version: `isFrameCounter` was never initialised, so
// getFrameCounterStatus() returned an indeterminate value until something
// wrote the flag.
HandleRtsp::HandleRtsp(const QString & respUrl,QSemaphore& semaphore,QObject *parent )
    :QThread(parent),respUrl(respUrl),semaphore(semaphore),stopFlag(false)
{
    frameCounter.store(63);
    isFrameCounter = false;
}
// Signals the run() loop to stop and blocks until the thread has exited, so
// the object is never destroyed while run() is still executing.
HandleRtsp::~HandleRtsp(){
    qInfo()<<QString("HandleResp %1 stopFlag").arg(respUrl);
    stopFlag=true;
    wait();
}
void HandleRtsp::setFrameCounter(int frameCounter){
this->frameCounter.store(frameCounter);
}
// Reports the isFrameCounter flag. NOTE(review): within this file the flag
// is only ever read here and is never assigned (the constructor does not
// initialise it either) — confirm the intended semantics with the callers.
bool HandleRtsp::getFrameCounterStatus(){
    return isFrameCounter;
}
//void HandleRtsp::run(){
// semaphore.acquire();
// std::string r_url = respUrl.toStdString();
// cv::VideoCapture cvCapture;
// cvCapture.set(cv::CAP_PROP_FPS, 30);
// int durationInSeconds = 600; // 视频片段时长为10分钟
// qDebug() << QString::fromStdString(r_url);;
// if(!cvCapture.open(r_url, cv::CAP_FFMPEG)){
// emit streamInterrupted(respUrl);
// return;
// }
// int frameCount;
// cv::Mat frame;
// cv::Size frameSize(640, 480);
// std::time_t t = std::time(nullptr);
// Common & instace= Common::getInstance();
// QString video= instace.getVideoOut();
// video.append("/").append(instace.getTimeString(t)).append(".mkv");
// std::string outputFileName =video.toStdString();
// qDebug()<<QString::fromStdString(outputFileName);
// int frame_width = static_cast<int>(cvCapture.get(cv::CAP_PROP_FRAME_WIDTH));
// int frame_height = static_cast<int>(cvCapture.get(cv::CAP_PROP_FRAME_HEIGHT));
// double fps = 30.0; // 可以从cap.get(cv::CAP_PROP_FPS)获取摄像头的帧率
// // int fourcc = cv::VideoWriter::fourcc('M', 'J', 'P', 'G'); // 使用H.264编码器
// cv::VideoWriter writer(outputFileName, cv::VideoWriter::fourcc('H', '2', '6', '4'), fps, cv::Size(frame_width, frame_height), true);
// while (!stopFlag) {
// if(!cvCapture.read(frame)){
// qInfo()<<"emit streamInterrupted(respUrl)";
// emit streamInterrupted(respUrl);
// break;
// }
// if(!frame.empty()){
// writer.write(frame); // 将帧写入视频文件
// //默认64
// if((frameCount && frameCounter.load())==0){
// cv::Mat frame_bgr;
// cv::cvtColor(frame, frame_bgr, cv::COLOR_RGB2BGR);
// emit frameReady(frame_bgr, respUrl);
// }
// frameCount++;
// }
// }
// cvCapture.release();
//}
// Thread entry point: records the RTSP stream into consecutive ~10-minute
// .mkv segments and emits every N-th non-empty frame (N = frameCounter,
// BGR-converted) via frameReady().
//
// Fixes vs. the previous version:
//  * `frameCount` was used in `frameCount % frameCounter` before ever being
//    initialised (undefined behaviour);
//  * the inner loop never called cvCapture.read(), so it wrote the SAME
//    frame repeatedly for up to 10 minutes — reads now happen per iteration;
//  * a single VideoWriter was created once and released after the first
//    segment while the outer loop kept running — each segment now gets its
//    own output file and writer;
//  * fps reported as 0 by some sources would produce a broken writer — a
//    30 fps fallback is applied.
void HandleRtsp::run() {
    cv::VideoCapture cvCapture;
    cvCapture.set(cv::CAP_PROP_FPS, 30);
    std::string r_url = respUrl.toStdString();
    Common &instance = Common::getInstance();
    if (!cvCapture.open(r_url, cv::CAP_FFMPEG)) {
        emit streamInterrupted(respUrl);
        return;
    }
    int frame_width = static_cast<int>(cvCapture.get(cv::CAP_PROP_FRAME_WIDTH));
    int frame_height = static_cast<int>(cvCapture.get(cv::CAP_PROP_FRAME_HEIGHT));
    double fps = cvCapture.get(cv::CAP_PROP_FPS);
    if (fps <= 0.0) {
        fps = 30.0; // some RTSP sources report 0; fall back to a sane rate
    }
    int frameCount = 0;
    cv::Mat frame;
    bool interrupted = false;
    while (!stopFlag && !interrupted) {
        // One output file per ~10-minute segment.
        std::time_t t = std::time(nullptr);
        QString video = instance.getVideoOut();
        video.append("/").append(instance.getTimeString(t)).append(".mkv");
        std::string outputFileName = video.toStdString();
        qDebug() << QString::fromStdString(outputFileName);
        cv::VideoWriter writer(outputFileName, cv::VideoWriter::fourcc('H', '2', '6', '4'), fps, cv::Size(frame_width, frame_height), true);
        auto start = QDateTime::currentDateTime();
        while (!stopFlag) {
            if (!cvCapture.read(frame)) {
                qInfo() << "emit streamInterrupted(respUrl)";
                emit streamInterrupted(respUrl);
                interrupted = true;
                break;
            }
            if (!frame.empty()) {
                writer.write(frame);
                int interval = frameCounter.load();
                // Guard against a zero interval to avoid division by zero.
                if (interval > 0 && (frameCount % interval) == 0) {
                    cv::Mat frame_bgr;
                    cv::cvtColor(frame, frame_bgr, cv::COLOR_RGB2BGR);
                    emit frameReady(frame_bgr, respUrl);
                }
                frameCount++;
            }
            if (start.secsTo(QDateTime::currentDateTime()) >= 600) {
                break; // rotate to the next segment file
            }
        }
        writer.release();
    }
    cvCapture.release();
}
#ifndef HANDLERTSP_H
#define HANDLERTSP_H
#include "LogHandle.h"
#include "FaceRecognition.h"
#include "hyper_lpr_sdk.h"
#include "Common.h"
#include <HandleRtsp.h>
#include <QDebug>
#include <QString>
#include <QThread>
#include <ctime>
#include <QSemaphore>
#include <opencv2/opencv.hpp>
#include <QtCore/QAtomicInteger>
// Worker thread that pulls frames from an RTSP stream with OpenCV, records
// them to .mkv files, and periodically emits sampled frames for analysis.
class HandleRtsp:public QThread
{
    Q_OBJECT
public:
    HandleRtsp(const QString & respUrl,QSemaphore& semaphore,QObject *parent=nullptr);
    // Thread entry point: capture/record loop (see HandleRtsp.cpp).
    void run () override;
    // Sets how often frameReady() fires (every N-th frame).
    void setFrameCounter(int frameCounter);
    bool getFrameCounterStatus();
    ~HandleRtsp();
signals:
    // Emitted with a colour-converted copy of every sampled frame.
    void frameReady(const cv::Mat & frame,const QString &url);
    // Emitted when the stream cannot be opened or a read fails.
    void streamInterrupted(const QString &url);
private:
    QString respUrl;                  // RTSP source URL
    volatile bool stopFlag;           // set by the destructor to end run()
    volatile bool isFrameCounter;     // NOTE(review): never assigned in this file
    QSemaphore & semaphore;           // acquired only by the legacy (commented-out) run()
    std::map<int,cv::Mat>maps;
    QAtomicInteger<int> frameCounter; // sampling interval for frameReady()
};
#endif // HANDLERTSP_H
#ifndef HTTPCLIENT_H
#define HTTPCLIENT_H
#include "Common.h"
#include <QHttpPart>
#include <QSsl>
#include <QObject>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QEventLoop>
#include <QFile>
#include <QTimer>
#include <QTextCodec>
#include <QDir>
#include <QRandomGenerator>
#include <QSslConfiguration>
// Synchronous HTTP helper built on QNetworkAccessManager: each call blocks
// on a local QEventLoop until the reply arrives or the internal timeout
// timer fires. Results are cached in m_text / m_data for later retrieval.
class HttpClient : public QObject
{
    Q_OBJECT
public:
    explicit HttpClient(QObject *parent = nullptr);
    ~HttpClient();
    // Blocking GET; returns true on success, payload available via text().
    bool get(QNetworkRequest &request);
    // Blocking POST of `data`; returns true on success.
    bool post(QNetworkRequest &request, const QByteArray &data);
    // Downloads to `filePath`; writes the resulting absolute name to
    // `fullPathName`. `extension` supplies the file name/suffix to use.
    bool downloadFile(QNetworkRequest request, const QString& filePath,
    QString &fullPathName,QString &extension);
    // Uploads `filePath` using STS credentials (OSS-style bucket upload).
    bool uploadFile(QNetworkRequest request,const QString& accessKeyId,
    const QString& accessKeySecret,const QString& filePath,
    QString & bucketName,QString &securityToken);
    QString errorCode() const;
    QString errorString() const;
    QString text() const;
    QString FileName() const;
    void setFileName(QString fileName);
private:
    void processReply(QNetworkReply *reply);
private:
    QNetworkAccessManager *m_networkAccessManager = nullptr;
    QNetworkReply::NetworkError m_error;
    QString m_text; // text body returned by the last network request
    QString m_errorString; // last error description
    int m_networkErrorRetry = 0; // retry count for network errors
    QEventLoop m_eventLoop;
    QTimer *m_timer = nullptr;
    QByteArray m_data; // raw bytes returned by the last network request
};
#endif // HTTPCLIENT_H
#include "HttpService.h"
// STS credentials cached for all HttpService instances; refreshed whenever
// httpFindCameras() parses the "sts_credentials" object from the server.
vides_data::responseStsCredentials HttpService::stsCredentials;
// Default-constructed service: the base URL must be supplied through
// setHttpUrl() before any request is issued.
HttpService::HttpService() {
}

// Service bound to the given base URL.
HttpService::HttpService(QString httpUrl) : httpUrl(httpUrl) {
}

HttpService::~HttpService() {
}
// POST /api/v1.0/device/ping — device heartbeat/status report.
// Returns a heap-allocated response the caller must delete.
//
// Fix vs. the previous version: the endpoint path was append()-ed onto the
// member `httpUrl`, permanently corrupting the base URL for every later
// request on the same instance. The URL is now composed on a local copy.
vides_data::response* HttpService::httpPostDeviceStatus(vides_data::requestDeviceStatus & deviceStatus) {
    QString requestUrl = httpUrl;
    requestUrl.append("/api/v1.0/device/ping");
    QJsonObject json;
    json.insert("sn",deviceStatus.sSn);
    json.insert("type",deviceStatus.type);
    json.insert("state",deviceStatus.status);
    json.insert("ip_addr",deviceStatus.ip_addr);
    QJsonDocument jsonDoc;
    jsonDoc.setObject(json);
    QByteArray bytearr= jsonDoc.toJson(QJsonDocument::Compact);
    vides_data::response *resp=new vides_data::response();
    QNetworkRequest request;
    request.setUrl(QUrl(requestUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    // Serialize access to the shared HttpClient.
    QMutexLocker locker(&m_httpClientMutex);
    if(m_httpClient.post(request,bytearr)){
        QByteArray && byte=m_httpClient.text().toUtf8();
        QJsonDocument docujson= QJsonDocument::fromJson(byte.data());
        QJsonObject maps= docujson.object();
        QVariantMap map =maps.toVariantMap();
        resp->code=map["code"].toInt();
        resp->msg=map["message"].toString();
    }else{
        qDebug()<<"httpPostDeviceStatus"<<m_httpClient.errorCode();
        resp->code=2;
        resp->msg=OPERATION_FAILED;
    }
    return resp;
}
// POST /api/v1.0/recongnition/record — pushes one recognition record.
// Returns a heap-allocated response the caller must delete.
//
// Fix: endpoint path is built on a local copy instead of mutating the
// member `httpUrl` (which corrupted it for subsequent requests).
vides_data::response* HttpService::httpPostRecord(int id,int recongnition_type,QString sn,QString videw_addr){
    QString requestUrl = httpUrl;
    requestUrl.append("/api/v1.0/recongnition/record");
    QJsonObject json;
    json.insert("id",id);
    json.insert("recongnition_type",recongnition_type);
    json.insert("sn",sn);
    json.insert("videw_addr",videw_addr);
    QJsonDocument jsonDoc;
    jsonDoc.setObject(json);
    QByteArray bytearr= jsonDoc.toJson(QJsonDocument::Compact);
    vides_data::response *resp=new vides_data::response();
    QNetworkRequest request;
    request.setUrl(QUrl(requestUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
    if(m_httpClient.post(request,bytearr)){
        QByteArray && byte=m_httpClient.text().toUtf8();
        QJsonDocument docujson= QJsonDocument::fromJson(byte.data());
        QJsonObject maps= docujson.object();
        QVariantMap map =maps.toVariantMap();
        resp->code=map["code"].toInt();
        resp->msg=map["message"].toString();
    }else{
        qDebug()<<m_httpClient.errorCode();
        resp->code=2;
        resp->msg=OPERATION_FAILED;
    }
    return resp;
}
void HttpService::setHttpUrl(const QString &httpUrl){
this->httpUrl=httpUrl;
}
// GET /api/v1.0/device/all — fetches the device list (with per-device areas)
// and refreshes the shared STS credentials cache. Returns a heap-allocated
// response the caller must delete.
//
// Fix: endpoint path is built on a local copy instead of mutating the
// member `httpUrl`.
vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_data::responseDeviceData&responseData) {
    QString requestUrl = httpUrl;
    requestUrl.append("/api/v1.0/device/all");
    vides_data::response *resp=new vides_data::response();
    QUrlQuery query;
    query.addQueryItem("sn",serialNumber);
    query.addQueryItem("new_token",QString::number(1));
    QNetworkRequest request;
    QUrl url(requestUrl);
    url.setQuery(query);
    request.setUrl(url);
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
    if(m_httpClient.get(request)){
        QByteArray && byte=m_httpClient.text().toUtf8();
        QJsonDocument docujson= QJsonDocument::fromJson(byte.data());
        QJsonObject maps= docujson.object();
        QVariantMap map =maps.toVariantMap();
        resp->code=map["code"].toInt();
        QJsonObject dataObj = maps["data"].toObject();
        // "sts_credentials": copied both into the out-parameter and the
        // class-wide cache.
        QJsonObject stsCredentialsObj = dataObj["sts_credentials"].toObject();
        HttpService::stsCredentials.access_key_id=responseData.sts_credentials.access_key_id = stsCredentialsObj["access_key_id"].toString();
        HttpService::stsCredentials.access_key_secret=responseData.sts_credentials.access_key_secret = stsCredentialsObj["access_key_secret"].toString();
        HttpService::stsCredentials.bucket=responseData.sts_credentials.bucket = stsCredentialsObj["bucket"].toString();
        HttpService::stsCredentials.endpoint=responseData.sts_credentials.endpoint = stsCredentialsObj["endpoint"].toString();
        HttpService::stsCredentials.expiration=responseData.sts_credentials.expiration = stsCredentialsObj["expiration"].toString();
        HttpService::stsCredentials.security_token=responseData.sts_credentials.security_token = stsCredentialsObj["security_token"].toString();
        QJsonArray dataArray = dataObj["list"].toArray();
        for (const QJsonValue& value : dataArray) {
            vides_data::responseDeviceStatus status;
            QJsonObject deviceObject = value.toObject();
            status.sSn = deviceObject["sn"].toString();
            status.type = static_cast<int8_t>(deviceObject["type"].toInt());
            status.merchant_id = static_cast<int8_t>(deviceObject["merchant_id"].toInt());
            // Per-device "areas" polygon corners.
            QJsonArray areasArray = deviceObject["areas"].toArray();
            for (const QJsonValue& areaValue : areasArray) {
                vides_data::responseArea area;
                QJsonObject areaObject = areaValue.toObject();
                area.bottom_right_corner_x = areaObject["bottom_right_corner_x"].toDouble();
                area.bottom_right_corner_y = areaObject["bottom_right_corner_y"].toDouble();
                area.top_left_corner_x = areaObject["top_left_corner_x"].toDouble();
                area.top_left_corner_y = areaObject["top_left_corner_y"].toDouble();
                area.bottom_left_corner_x = areaObject["bottom_left_corner_x"].toDouble();
                area.bottom_left_corner_y = areaObject["bottom_left_corner_y"].toDouble();
                area.top_right_corner_x = areaObject["top_right_corner_x"].toDouble();
                area.top_right_corner_y = areaObject["top_right_corner_y"].toDouble();
                status.areas.push_back(area);
            }
            responseData.list.push_back(status);
        }
        resp->msg=map["message"].toString();
    }else{
        qDebug()<<m_httpClient.errorCode();
        resp->code=2;
        resp->msg=OPERATION_FAILED;
    }
    return resp;
}
// POST /api/v1.0/recongnition/plate — uploads recognized plates and parses
// the per-plate recognition records returned by the server into `result`.
// Returns a heap-allocated response the caller must delete.
//
// Fix: endpoint path is built on a local copy instead of mutating the
// member `httpUrl`.
vides_data::response *HttpService::httpLicensePlateRecognition(vides_data::requestLicensePlate &licensePlateRecognition,
                                                               std::list<vides_data::responseRecognitionData>&result ){
    QNetworkRequest request;
    QString requestUrl = httpUrl;
    requestUrl.append("/api/v1.0/recongnition/plate");
    QJsonObject requestBody;
    // Build the "list" array: one item per plate, with the camera-area
    // polygon, the recognized-plate polygon, the image and metadata.
    QJsonArray jsonArray;
    for (const auto& plate : licensePlateRecognition.plates) {
        QJsonObject item;
        QJsonObject cameraObject {
            {"bottom_right_corner_x", plate.areaLocation.bottomRightCornerX},
            {"bottom_right_corner_y", plate.areaLocation.bottomRightCornerY},
            {"top_left_corner_x", plate.areaLocation.topLeftCornerX},
            {"top_left_corner_y", plate.areaLocation.topLeftCornerY},
            {"bottom_left_corner_x", plate.areaLocation.bottomLeftCornerX},
            {"bottom_left_corner_y", plate.areaLocation.bottomLeftCornerY},
            {"top_right_corner_x", plate.areaLocation.topRightCornerX},
            {"top_right_corner_y", plate.areaLocation.topRightCornerY}
        };
        item.insert("camera_location", cameraObject);
        item.insert("img", QJsonValue::fromVariant(plate.img));
        QJsonObject locationObject {
            {"bottom_right_corner_x", plate.recognition.bottomRightCornerX},
            {"bottom_right_corner_y", plate.recognition.bottomRightCornerY},
            {"top_left_corner_x", plate.recognition.topLeftCornerX},
            {"top_left_corner_y", plate.recognition.topLeftCornerY},
            {"bottom_left_corner_x", plate.recognition.bottomLeftCornerX},
            {"bottom_left_corner_y", plate.recognition.bottomLeftCornerY},
            {"top_right_corner_x", plate.recognition.topRightCornerX},
            {"top_right_corner_y", plate.recognition.topRightCornerY}
        };
        item.insert("location", locationObject);
        item.insert("new_color", plate.new_color);
        item.insert("new_plate", plate.new_plate);
        item.insert("time", QJsonValue::fromVariant(plate.time));
        jsonArray.append(item);
    }
    requestBody.insert("list", jsonArray);
    requestBody.insert("sn", licensePlateRecognition.sn);
    QJsonDocument doc(requestBody);
    QByteArray bytearr= doc.toJson(QJsonDocument::Compact);
    vides_data::response *resp=new vides_data::response();
    request.setUrl(QUrl(requestUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
    if(m_httpClient.post(request,bytearr)){
        QByteArray && byte=m_httpClient.text().toUtf8();
        QJsonDocument docujson= QJsonDocument::fromJson(byte.data());
        QJsonObject maps= docujson.object();
        QVariantMap map =maps.toVariantMap();
        resp->code=map["code"].toInt();
        resp->msg=map["message"].toString();
        QJsonObject data = map["data"].toJsonObject();
        QJsonArray dataList = data["list"].toArray();
        for (const auto& item : dataList) {
            QJsonObject currentItem = item.toObject();
            vides_data::responseRecognitionData res;
            res.id = currentItem["id"].toInt();
            res.inTime = currentItem["in_time"].toInt();
            res.outTime = currentItem["out_time"].toInt();
            res.recognitionType = currentItem["recongnition_type"].toInt();
            res.sn = currentItem["sn"].toString();
            result.push_back(res);
        }
    }else{
        qDebug()<<m_httpClient.errorCode();
        resp->code=2;
        resp->msg=OPERATION_FAILED;
    }
    return resp;
}
// GET /api/v1.0/recongnition/face_list — fetches the registered face list.
// The returned response AND each responseFaceReconition in `datas` are
// heap-allocated and owned by the caller.
//
// Fix: endpoint path is built on a local copy instead of mutating the
// member `httpUrl`.
vides_data::response* HttpService::httpFindFaceReconition(QString &serialNumber, std::list<vides_data::responseFaceReconition *> &datas){
    QNetworkRequest request;
    QString requestUrl = httpUrl;
    requestUrl.append("/api/v1.0/recongnition/face_list");
    vides_data::response *resp=new vides_data::response();
    QUrlQuery query;
    query.addQueryItem("sn",serialNumber);
    QUrl url(requestUrl);
    url.setQuery(query);
    request.setUrl(url);
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
    if(m_httpClient.get(request)){
        QByteArray && byte=m_httpClient.text().toUtf8();
        QJsonDocument docujson= QJsonDocument::fromJson(byte.data());
        QJsonObject maps= docujson.object();
        QVariantMap map =maps.toVariantMap();
        resp->code=map["code"].toInt();
        QJsonArray dataArray = map["data"].toJsonArray();
        for (const QJsonValue& value : dataArray) {
            vides_data::responseFaceReconition *res=new vides_data::responseFaceReconition();
            QJsonObject dataObject = value.toObject();
            res->id= QString::number(dataObject["id"].toInt());
            res->img = dataObject["img"].toString();
            datas.push_back(res);
        }
        resp->msg=map["message"].toString();
    }else{
        qDebug()<<m_httpClient.errorCode();
        resp->code=2;
        resp->msg=OPERATION_FAILED;
    }
    return resp;
}
// POST /api/v1.0/recongnition/population — pushes a head-count change with a
// snapshot image. Returns a heap-allocated response the caller must delete.
//
// Fix: endpoint path is built on a local copy instead of mutating the
// member `httpUrl`.
vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &number,QString sn,qint64 time){
    QString requestUrl = httpUrl;
    requestUrl.append("/api/v1.0/recongnition/population");
    QJsonObject json;
    json.insert("img", QJsonValue::fromVariant(img));
    json.insert("sn",sn);
    json.insert("number",number);
    json.insert("time",QJsonValue::fromVariant(time));
    QJsonDocument jsonDoc;
    jsonDoc.setObject(json);
    QByteArray bytearr= jsonDoc.toJson(QJsonDocument::Compact);
    vides_data::response *resp=new vides_data::response();
    QNetworkRequest request;
    request.setUrl(QUrl(requestUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
    if(m_httpClient.post(request,bytearr)){
        QByteArray && byte=m_httpClient.text().toUtf8();
        QJsonDocument docujson= QJsonDocument::fromJson(byte.data());
        QJsonObject maps= docujson.object();
        QVariantMap map =maps.toVariantMap();
        resp->code=map["code"].toInt();
        resp->msg=map["message"].toString();
    }else{
        qDebug()<<m_httpClient.errorCode();
        resp->code=2;
        resp->msg=OPERATION_FAILED;
    }
    return resp;
}
// POST /api/v1.0/recongnition/face — pushes one face-recognition hit with
// its bounding polygon. Returns a heap-allocated response the caller must
// delete.
//
// Fixes vs. the previous version:
//  * this was the ONLY HttpService request method that did not take
//    m_httpClientMutex before using the shared HttpClient — a data race
//    with every other method; the locker is now present;
//  * endpoint path is built on a local copy instead of mutating the
//    member `httpUrl`.
vides_data::response *HttpService::httpPostFaceReconition(vides_data::requestFaceReconition & faceReconition){
    QString requestUrl = httpUrl;
    requestUrl.append("/api/v1.0/recongnition/face");
    QJsonObject json;
    json.insert("id",QJsonValue::fromVariant(faceReconition.id.toInt()));
    json.insert("img", QJsonValue::fromVariant(faceReconition.img));
    json.insert("sn",faceReconition.sn);
    json.insert("time",QJsonValue::fromVariant(faceReconition.time));
    // Bounding polygon of the recognized face.
    QJsonObject location;
    location.insert("bottom_right_corner_x", faceReconition.area.bottom_right_corner_x);
    location.insert("bottom_right_corner_y", faceReconition.area.bottom_right_corner_y);
    location.insert("top_left_corner_x",faceReconition.area.top_left_corner_x);
    location.insert("top_left_corner_y",faceReconition.area.top_left_corner_y);
    location.insert("bottom_left_corner_x", faceReconition.area.bottom_left_corner_x);
    location.insert("bottom_left_corner_y", faceReconition.area.bottom_left_corner_y);
    location.insert("top_right_corner_x",faceReconition.area.top_right_corner_x);
    location.insert("top_right_corner_y",faceReconition.area.top_right_corner_y);
    json.insert("location", location);
    QJsonDocument jsonDoc;
    jsonDoc.setObject(json);
    QByteArray bytearr= jsonDoc.toJson(QJsonDocument::Compact);
    vides_data::response *resp=new vides_data::response();
    QNetworkRequest request;
    request.setUrl(QUrl(requestUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
    if(m_httpClient.post(request,bytearr)){
        QByteArray && byte=m_httpClient.text().toUtf8();
        QJsonDocument docujson= QJsonDocument::fromJson(byte.data());
        QJsonObject maps= docujson.object();
        QVariantMap map =maps.toVariantMap();
        resp->code=map["code"].toInt();
        resp->msg=map["message"].toString();
    }else{
        resp->code=2;
        resp->msg=m_httpClient.errorString();
    }
    return resp;
}
// GET /api/v1.0/gb28181/config — fetches the GB28181 SIP configuration.
// On success `resp->data` points to a heap-allocated responseGb28181; both
// it and the response itself are owned by the caller.
//
// Fix: endpoint path is built on a local copy instead of mutating the
// member `httpUrl`.
vides_data::response*HttpService::httpFindGb28181Config(QString &serialNumber){
    QString requestUrl = httpUrl;
    requestUrl.append("/api/v1.0/gb28181/config");
    vides_data::response *resp=new vides_data::response();
    QUrlQuery query;
    query.addQueryItem("sn",serialNumber);
    QNetworkRequest request;
    QUrl url(requestUrl);
    url.setQuery(query);
    request.setUrl(url);
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
    if(m_httpClient.get(request)){
        QByteArray && byte=m_httpClient.text().toUtf8();
        QJsonDocument docujson= QJsonDocument::fromJson(byte.data());
        QJsonObject maps= docujson.object();
        QVariantMap map =maps.toVariantMap();
        resp->code=map["code"].toInt();
        vides_data::responseGb28181 *response=new vides_data::responseGb28181();
        QJsonObject data = map["data"].toJsonObject();
        response->sip_ip = data["sip_ip"].toString();
        // sip_port arrives as a string in the payload.
        QString sip=data["sip_port"].toString();
        response->sip_port =sip.toInt();
        response->serial = data["serial"].toString();
        response->realm = data["realm"].toString();
        response->username = data["username"].toString();
        response->password = data["password"].toString();
        response->register_validity = data["register_validity"].toInt();
        response->heartbeat_interval = data["heartbeat_interval"].toInt();
        response->device_id = data["device_id"].toString();
        response->channel_id = data["channel_id"].toString();
        resp->data=response;
        resp->msg=map["message"].toString();
    }else{
        qDebug()<<m_httpClient.errorCode();
        resp->code=2;
        resp->msg=m_httpClient.errorString();
    }
    return resp;
}
// GET /api/v1.0/stream — fetches the push-stream address for a device.
// Returns a heap-allocated response the caller must delete.
//
// Fix: endpoint path is built on a local copy instead of mutating the
// member `httpUrl`.
vides_data::response*HttpService::httpFindStream(QString &serialNumber){
    QString requestUrl = httpUrl;
    requestUrl.append("/api/v1.0/stream");
    vides_data::response *resp=new vides_data::response();
    QUrlQuery query;
    query.addQueryItem("sn",serialNumber);
    QNetworkRequest request;
    QUrl url(requestUrl);
    url.setQuery(query);
    request.setUrl(url);
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    QMutexLocker locker(&m_httpClientMutex);
    if(m_httpClient.get(request)){
        QByteArray && byte=m_httpClient.text().toUtf8();
        QJsonDocument docujson= QJsonDocument::fromJson(byte.data());
        QJsonObject maps= docujson.object();
        QVariantMap map =maps.toVariantMap();
        resp->code=map["code"].toInt();
        QJsonObject data = map["data"].toJsonObject();
        QString pushStream = data["push_stream"].toString();
        QByteArray byteArray = pushStream.toUtf8();
        // FIXME(review): byteArray is a local — resp->data receives a pointer
        // that dangles as soon as this function returns. The caller must not
        // dereference it; resp->data should carry a heap-allocated object as
        // httpFindGb28181Config() does. Behaviour kept pending a decision on
        // the type callers expect.
        resp->data=byteArray.data();
        resp->msg=map["message"].toString();
    }else{
        qDebug()<<m_httpClient.errorCode();
        resp->code=2;
        resp->msg=OPERATION_FAILED;
    }
    return resp;
}
// Downloads the resource at the current base URL into `filePath`; the
// resulting absolute name is written to `fullPathName`. The returned
// response is heap-allocated and owned by the caller.
vides_data::response *HttpService::httpDownload( const QString &filePath,QString &fullPathName){
    vides_data::response *resp = new vides_data::response();
    QUrl url(httpUrl);
    QNetworkRequest request;
    request.setUrl(url);
    QString fileName = url.fileName();
    QMutexLocker locker(&m_httpClientMutex);
    bool ok = m_httpClient.downloadFile(request, filePath, fullPathName, fileName);
    if (!ok) {
        qDebug() << m_httpClient.errorCode();
        resp->code = 2;
        resp->msg = OPERATION_FAILED;
        return resp;
    }
    QByteArray payload = m_httpClient.text().toUtf8();
    QJsonObject root = QJsonDocument::fromJson(payload.data()).object();
    QVariantMap fields = root.toVariantMap();
    resp->code = fields["code"].toInt();
    resp->msg = fields["message"].toString();
    return resp;
}
// Uploads `filePath` through the shared HttpClient using STS credentials.
// The returned response is heap-allocated and owned by the caller.
vides_data::response*HttpService::httpUploadFile(const QString &filePath,QString& accessKeyId,
                                                 QString& accessKeySecret,QString & bucketName,QString &securityToken){
    vides_data::response *resp = new vides_data::response();
    QNetworkRequest request;
    request.setUrl(QUrl(httpUrl));
    QMutexLocker locker(&m_httpClientMutex);
    bool ok = m_httpClient.uploadFile(request, accessKeyId, accessKeySecret, filePath, bucketName, securityToken);
    if (!ok) {
        qDebug() << m_httpClient.errorCode();
        resp->code = 2;
        resp->msg = OPERATION_FAILED;
        return resp;
    }
    QByteArray payload = m_httpClient.text().toUtf8();
    QJsonObject root = QJsonDocument::fromJson(payload.data()).object();
    QVariantMap fields = root.toVariantMap();
    resp->code = fields["code"].toInt();
    resp->msg = fields["message"].toString();
    return resp;
}
#ifndef HTTPSERVICE_H
#define HTTPSERVICE_H
#include "LogHandle.h"
#include "HttpClient.h"
#include "VidesData.h"
#include "Common.h"
#include <list>
#include <QJsonObject>
#include <QJsonArray>
#include <QJsonDocument>
#include <QUrlQuery>
#include <QMutex>
#include <QMutexLocker>
// Shared result strings for HttpService responses.
const QString OPERATION_FAILED="操作失败";
const QString OPERATION_SUCCESS="操作成功";
// Thin synchronous REST client for the platform API. All requests share one
// HttpClient guarded by m_httpClientMutex; every httpXxx() method returns a
// heap-allocated vides_data::response that the caller must delete.
class HttpService{
public:
    HttpService();
    HttpService(QString httpUrl);
    // Device heartbeat / status report.
    vides_data::response* httpPostDeviceStatus(vides_data::requestDeviceStatus & deviceStatus);
    // Pushes a single recognition record.
    vides_data::response* httpPostRecord(int id,int recongnition_type,QString sn,QString videw_addr);
    // Fetches the device list and refreshes the cached STS credentials.
    vides_data::response *httpFindCameras(QString &serialNumber,vides_data::responseDeviceData&datas);
    // Pushes license-plate recognition results.
    vides_data::response *httpLicensePlateRecognition(vides_data::requestLicensePlate &licensePlateRecognition,
    std::list<vides_data::responseRecognitionData>&result
    );
    // Registered face list.
    vides_data::response *httpFindFaceReconition(QString &serialNumber,std::list<vides_data::responseFaceReconition*>&datas);
    // Pushes a face-recognition hit.
    vides_data::response *httpPostFaceReconition(vides_data::requestFaceReconition & faceReconition);
    // Pushes head-count changes.
    vides_data::response *httpPostFacePopulation(QByteArray &img,int &number,QString sn,qint64 time);
    // Push-stream address for a device.
    vides_data::response *httpFindStream(QString &serialNumber);
    // GB28181 SIP configuration.
    vides_data::response *httpFindGb28181Config(QString &serialNumber);
    vides_data::response *httpDownload( const QString &filePath,QString &fullPathName);
    vides_data::response *httpUploadFile(const QString &filePath,QString& accessKeyId,QString& accessKeySecret,
    QString & bucketName,QString &securityToken);
    void setHttpUrl(const QString & httpUrl);
    // Class-wide STS credential cache, refreshed by httpFindCameras().
    static vides_data::responseStsCredentials stsCredentials;
    ~HttpService();
private:
    QString httpUrl;          // base URL for all endpoints
    HttpClient m_httpClient;  // shared, synchronized via m_httpClientMutex
    QMutex m_httpClientMutex;
};
#endif // HTTPSERVICE_H
#include "HttpClient.h"
// Construct the client with a 4-second single-shot timeout timer; when it
// fires it quits the internal event loop used by processReply().
HttpClient::HttpClient(QObject *parent)
    : QObject(parent)
{
    m_networkAccessManager = new QNetworkAccessManager(this);
    m_timer = new QTimer(this);
    m_timer->setInterval(4000); // per-request timeout in ms
    m_timer->setSingleShot(true);
    connect(m_timer, SIGNAL(timeout()), &m_eventLoop, SLOT(quit()));
}
// Both members are QObject children of `this`; the explicit deletes here
// simply release them earlier than the parent's child cleanup would.
HttpClient::~HttpClient()
{
    delete m_networkAccessManager;
    delete m_timer;
}
// Issue a synchronous GET for `request`, retrying up to m_networkErrorRetry
// additional times on network error. Returns true when a reply completed with
// QNetworkReply::NoError; the body is then available via text().
// (Removed dead locals `url`/`urlString` left over from an old URL check.)
bool HttpClient::get(QNetworkRequest &request)
{
    bool success = false;
    request.setPriority(QNetworkRequest::HighPriority);
    // One initial attempt plus m_networkErrorRetry retries.
    for (int i = 0; i < m_networkErrorRetry + 1; i++)
    {
        QNetworkReply *reply = m_networkAccessManager->get(request);
        processReply(reply); // blocks until finished or timeout; deletes reply
        if (m_error == QNetworkReply::NoError)
        {
            success = true;
            break;
        }
    }
    return success;
}
// Synchronously POST `data` to the request's URL, retrying on network errors
// exactly like get(). Returns true on the first error-free reply.
bool HttpClient::post(QNetworkRequest &request, const QByteArray &data)
{
    request.setPriority(QNetworkRequest::HighPriority);
    int attempt = 0;
    while (attempt++ <= m_networkErrorRetry) // initial try + retries
    {
        QNetworkReply *reply = m_networkAccessManager->post(request, data);
        processReply(reply); // blocks until finished or timeout; deletes reply
        if (m_error == QNetworkReply::NoError)
        {
            return true;
        }
    }
    return false;
}
// Synchronously download the request's URL into the directory `filePath`;
// the target file is filePath + extension. On return, fullPathName holds the
// native path of the target file (set even when the download failed).
// Returns true only when the reply finished without error.
// (Removed an unused `Common & instace` local.)
bool HttpClient::downloadFile(QNetworkRequest request, const QString &filePath,
QString &fullPathName,QString &extension)
{
    bool success = false;
    QDir dir(filePath);
    if (!dir.exists()) {
        dir.mkpath(".");
    }
    QString filePa = QDir::toNativeSeparators(filePath + extension);
    // Accept any TLS protocol and skip peer verification (self-signed hosts).
    QSslConfiguration config = QSslConfiguration::defaultConfiguration();
    config.setProtocol(QSsl::AnyProtocol);
    config.setPeerVerifyMode(QSslSocket::VerifyNone);
    request.setSslConfiguration(config);
    QNetworkReply* reply = m_networkAccessManager->get(request);
    reply->ignoreSslErrors();
    QEventLoop loop;
    QObject::connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
    QFile file(filePa);
    if (!file.open(QIODevice::WriteOnly)) {
        qDebug() << "Failed to open file for writing";
        reply->deleteLater(); // 确保释放网络回复资源
        return false;
    }
    // Stream the payload to disk as it arrives.
    QObject::connect(reply, &QNetworkReply::readyRead, [&]() {
        file.write(reply->readAll());
    });
    QObject::connect(reply, &QNetworkReply::finished, [&, reply]() {
        if (reply->error() != QNetworkReply::NoError) {
            qDebug() << "Download failed:" << reply->errorString();
            file.remove(); // drop the incomplete file (remove() also closes it)
        } else {
            success = true;
            file.flush();
            file.close();
        }
        reply->deleteLater();
    });
    loop.exec(); // block until finished() fires
    fullPathName = filePa;
    return success;
}
// Synchronously PUT `filePath` to an Aliyun OSS bucket using STS credentials,
// signing the request per the OSS "Authorization: OSS key:signature" scheme.
// On success the local file is removed. Returns true on HTTP success.
// Fix: the two early-return error paths previously left the heap QFile open
// and alive until this HttpClient was destroyed; they now release it.
bool HttpClient::uploadFile(QNetworkRequest request,const QString& accessKeyId,
const QString& accessKeySecret,
const QString &filePath,QString & bucketName,QString &securityToken){
    bool success = false;
    QFile *file = new QFile(filePath, this);
    qDebug() << "filePath"<<filePath;
    if (!file->open(QIODevice::ReadOnly)) {
        qDebug() << "uploadFile Failed to open file for reading";
        file->deleteLater(); // was leaked until client destruction
        return false;
    }
    QByteArray fileData = file->readAll();
    if(fileData.isEmpty()){
        qDebug() << "uploadFile = file.readAll()";
        file->close();
        file->deleteLater(); // was leaked until client destruction
        return false;
    }
    QFileInfo fileInfo(file->fileName());
    QString fileName = fileInfo.fileName(); // file name including extension
    // Accept any TLS protocol and skip peer verification.
    QSslConfiguration config = QSslConfiguration::defaultConfiguration();
    config.setProtocol(QSsl::AnyProtocol);
    config.setPeerVerifyMode(QSslSocket::VerifyNone);
    request.setSslConfiguration(config);
    Common & instace= Common::getInstance();
    // OSS requires an RFC-1123 GMT date formatted in the English locale.
    QDateTime now = QDateTime::currentDateTimeUtc();
    QLocale englishLocale(QLocale::English, QLocale::UnitedStates);
    QString gmtDateString = englishLocale.toString(now, "ddd, dd MMM yyyy HH:mm:ss 'GMT'");
    QString oSSHeaders="x-oss-security-token:";
    oSSHeaders.append(securityToken);
    QString signature= instace.generateSignature(accessKeySecret,"PUT","","video/mp4",gmtDateString,oSSHeaders,"/"+bucketName+"/"+fileName);
    request.setRawHeader("Date", gmtDateString.toUtf8());
    QString authHeaderValue = "OSS " + accessKeyId + ":" + signature;
    request.setRawHeader("Authorization", authHeaderValue.toUtf8());
    request.setRawHeader("X-Oss-Security-Token",securityToken.toUtf8());
    request.setHeader(QNetworkRequest::ContentTypeHeader, "video/mp4");
    request.setHeader(QNetworkRequest::ContentLengthHeader, QString::number(fileData.size()).toUtf8());
    QNetworkReply *reply = m_networkAccessManager->put(request,fileData);
    reply->ignoreSslErrors();
    QEventLoop loop;
    connect(reply, &QNetworkReply::finished, this, [reply, file,&loop, &success]() {
        if (reply->error() == QNetworkReply::NoError) {
            qDebug() << "Upload successful!";
            file->remove(); // uploaded: delete the local copy
            success=true;
        } else {
            qDebug() << "Upload failed:" << reply->errorString();
        }
        file->close();
        file->deleteLater();
        reply->deleteLater();
        loop.quit();
    });
    // Block until the request completes.
    loop.exec();
    return success;
}
// Block until `reply` finishes or the 4-second m_timer fires, recording the
// outcome into m_error/m_errorString and (on success) the body into
// m_data/m_text. The reply is deleted before returning on every path.
void HttpClient::processReply(QNetworkReply *reply)
{
    connect(reply, SIGNAL(finished()), &m_eventLoop, SLOT(quit()));
    m_text.clear();
    m_timer->start();
    m_eventLoop.exec();
    // Timer still running means finished() fired before the timeout.
    if (m_timer->isActive())
    {
        m_timer->stop();
        m_error = reply->error();
        m_errorString = reply->errorString();
        if (reply->bytesAvailable() > 0)
        {
            m_data = reply->readAll();
            // Honor a charset declared in the HTML body, defaulting to UTF-8.
            QTextCodec *codec = QTextCodec::codecForHtml(m_data, QTextCodec::codecForName("utf-8"));
            if (codec)
                m_text = codec->toUnicode(m_data);
        }
        else
        {
            m_data.clear();
            m_text.clear();
        }
    }
    else
    {
        // Timeout: abort the transfer and report a synthetic timeout error.
        reply->abort();
        m_error = QNetworkReply::TimeoutError;
    }
    // NOTE(review): Qt docs recommend reply->deleteLater() over direct delete;
    // confirm no queued signals can still target this reply before changing.
    delete reply;
}
// Human-readable description of the last reply's error.
QString HttpClient::errorString() const{
    return m_errorString;
}
// Map the last QNetworkReply::NetworkError to its enumerator name, or
// "UnknownError" for values not listed below.
QString HttpClient::errorCode() const
{
    switch (m_error)
    {
    case QNetworkReply::NoError: return "NoError";
    case QNetworkReply::ConnectionRefusedError: return "ConnectionRefusedError";
    case QNetworkReply::RemoteHostClosedError: return "RemoteHostClosedError";
    case QNetworkReply::HostNotFoundError: return "HostNotFoundError";
    case QNetworkReply::TimeoutError: return "TimeoutError";
    case QNetworkReply::OperationCanceledError: return "OperationCanceledError";
    case QNetworkReply::SslHandshakeFailedError: return "SslHandshakeFailedError";
    case QNetworkReply::TemporaryNetworkFailureError: return "TemporaryNetworkFailureError";
    case QNetworkReply::ProxyConnectionRefusedError: return "ProxyConnectionRefusedError";
    case QNetworkReply::ProxyConnectionClosedError: return "ProxyConnectionClosedError";
    case QNetworkReply::ProxyNotFoundError: return "ProxyNotFoundError";
    case QNetworkReply::ProxyTimeoutError: return "ProxyTimeoutError";
    case QNetworkReply::ProxyAuthenticationRequiredError: return "ProxyAuthenticationRequiredError";
    case QNetworkReply::ContentAccessDenied: return "ContentAccessDenied";
    case QNetworkReply::ContentOperationNotPermittedError: return "ContentOperationNotPermittedError";
    case QNetworkReply::ContentNotFoundError: return "ContentNotFoundError";
    case QNetworkReply::AuthenticationRequiredError: return "AuthenticationRequiredError";
    case QNetworkReply::ContentReSendError: return "ContentReSendError";
    case QNetworkReply::ProtocolUnknownError: return "ProtocolUnknownError";
    case QNetworkReply::ProtocolInvalidOperationError: return "ProtocolInvalidOperationError";
    case QNetworkReply::UnknownNetworkError: return "UnknownNetworkError";
    case QNetworkReply::UnknownProxyError: return "UnknownProxyError";
    case QNetworkReply::UnknownContentError: return "UnknownContentError";
    case QNetworkReply::ProtocolFailure: return "ProtocolFailure";
    }
    return "UnknownError";
}
// Decoded body of the last successful reply (set by processReply()).
QString HttpClient::text() const
{
    return m_text;
}
#include "HumanDetection.h"
// Legacy singleton pointer; unused by getInstance(), which returns a
// function-local static instead. Kept for ABI/source compatibility.
HumanDetection* HumanDetection::instance = nullptr;
HumanDetection::HumanDetection(){
}
HumanDetection::~HumanDetection(){
}
// Run one frame of detection on a BGR image.
// res == 0 counts people; any other value is the "car" path, which is
// currently hard-coded to return 1 (the TCV_HumanDetectorGetNumOfCar call is
// commented out below — presumably intentional, TODO confirm).
int HumanDetection::findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector *detector){
    TCV_CameraStream *stream = TCV_CreateCameraStream();
    TCV_CameraStreamSetData(stream, source.data, source.cols, source.rows);
    TCV_CameraStreamSetRotationMode(stream, TCV_CAMERA_ROTATION_0);
    TCV_CameraStreamSetStreamFormat(stream, TCV_STREAM_BGR);
    // 0 = person, 1 = car
    // Run detection on this single frame.
    TCV_HumanDetectorProcessFrame(detector, stream);
    int num = (res == 0) ? TCV_HumanDetectorGetNumOfHuman(detector) : 1;//TCV_HumanDetectorGetNumOfCar(detector);
    qDebug() << (res == 0 ? "Number of people detected:" : "Number of cars detected:") << num;
    TCV_ReleaseCameraStream(stream);
    return num;
}
#ifndef HUMANDETECTION_H
#define HUMANDETECTION_H
#include "so_human_sdk.h"
#include <opencv2/opencv.hpp>
#include <QDebug>
// Thin wrapper around the TCV human-detection SDK.
// Singleton via getInstance() (Meyers static); the private static `instance`
// pointer below is legacy and unused by getInstance().
class HumanDetection
{
public:
    HumanDetection();
    ~HumanDetection();
    // Declared but no definition is visible in this file — TODO confirm used.
    void initDetector();
    // Count people (res == 0) or cars (other res) in one BGR frame.
    int findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector *detector);
    static HumanDetection& getInstance()
    {
        static HumanDetection instance;
        return instance;
    }
private:
    static HumanDetection* instance;
};
#endif // HUMANDETECTION_H
#include "Common.h"
#include "LicensePlateRecognition.h"
LicensePlateRecognition::LicensePlateRecognition() {}
LicensePlateRecognition::~LicensePlateRecognition(){
}
// Legacy singleton pointer; unused by getInstance() (Meyers static).
LicensePlateRecognition* LicensePlateRecognition::instance = nullptr;
//void LicensePlateRecognition::initHlprContext(const QString &modelPaths, const QString &carCascade, float carConfidence){
// HLPR_ContextConfiguration configuration = {0};
// QByteArray && by_mpath=modelPaths.toUtf8();
// char* m_path=by_mpath.data();
// configuration.models_path = m_path;
// configuration.max_num = 5;
// configuration.det_level = DETECT_LEVEL_LOW;
// configuration.use_half = false;
// configuration.nms_threshold = 0.5f;
// configuration.rec_confidence_threshold = carConfidence;
// configuration.box_conf_threshold = 0.30f;
// configuration.threads = 1;
// this->carCascadeUrl=carCascade;
// ctx = HLPR_CreateContext(&configuration);
//}
// Legacy one-shot recognition: builds a fresh HLPR context for each call,
// runs plate recognition on `source` (BGR), and joins per-plate summaries
// into lpNumber (one line per plate). Models are loaded from modelPaths.
// Fixes: the data buffer was leaked when context creation failed, and 'Q' is
// not a valid QString::arg(double) format char (valid: e/E/f/g/G) -> 'f'.
void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const QString &modelPaths,QString & lpNumber){
    HLPR_ImageData data = {0};
    data.data = source.data;
    data.width = source.cols;
    data.height = source.rows;
    data.format = STREAM_BGR;
    data.rotation = CAMERA_ROTATION_0;
    // create DataBuffer
    P_HLPR_DataBuffer buffer = HLPR_CreateDataBuffer(&data);
    // Sanity-dump the stream: a crash or an unexpected image here means the
    // input layout/format does not match what the SDK expects.
    HLPR_DataBufferTest(buffer, "test_buffer.jpg");
    // create context
    HLPR_ContextConfiguration configuration = {0};
    QByteArray && by_mpath=modelPaths.toUtf8();
    char* m_path=by_mpath.data();
    configuration.models_path = m_path;
    configuration.max_num = 5;
    configuration.det_level = DETECT_LEVEL_LOW;
    configuration.use_half = false;
    configuration.nms_threshold = 0.5f;
    configuration.rec_confidence_threshold = 0.8f;
    configuration.box_conf_threshold = 0.30f;
    configuration.threads = 1;
    P_HLPR_Context ctx1 = HLPR_CreateContext(&configuration);
    HREESULT ret = HLPR_ContextQueryStatus(ctx1);
    if (ret != HResultCode::Ok) {
        printf("create error.\n");
        HLPR_ReleaseDataBuffer(buffer); // fix: buffer leaked on this path before
        return ;
    }
    // exec plate recognition
    HLPR_PlateResultList results = {0};
    HLPR_ContextUpdateStream(ctx1, buffer, &results);
    QStringList lpResults;
    for (int i = 0; i < results.plate_size; ++i) {
        std::string type;
        if (results.plates[i].type == HLPR_PlateType::PLATE_TYPE_UNKNOWN) {
            type = "未知";
        } else {
            type = types[results.plates[i].type];
        }
        qDebug()<<QString("车牌号:%1").arg(results.plates[i].code);
        // 'f' = fixed-point, 4 decimals; the old 'Q' was invalid and made Qt
        // warn and fall back to its default float formatting.
        QString plateResult = QString("第%1个,%2,车牌号:%3,置信度:%4,左上角点x坐标:%5,左上角点y坐标:%6,右下角点x坐标:%7,右下角点y坐标:%8")
                .arg(i + 1).arg(type.c_str()).arg(results.plates[i].code)
                .arg(results.plates[i].text_confidence).arg(results.plates[i].x1, 0, 'f', 4)
                .arg(results.plates[i].y1, 0, 'f', 4).arg(results.plates[i].x2, 0, 'f', 4)
                .arg(results.plates[i].y2, 0, 'f', 4);
        lpResults.append(plateResult);
    }
    lpNumber =lpResults.join("\n");
    // release buffer
    HLPR_ReleaseDataBuffer(buffer);
    // release context
    HLPR_ReleaseContext(ctx1);
}
// Run plate recognition on `source` (BGR) using the caller-provided context.
// For each detected plate, a vides_data::LicensePlate (number, color, bounding
// corners, timestamp) is prepended to plate.plates, and a tab-joined summary
// string is written to lpNumber. No-op (empty lpNumber) when nothing is found.
// Fixes: the data buffer was leaked when the context status check failed, and
// 'Q' is not a valid QString::arg(double) format char (valid: e/E/f/g/G).
void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
qint64 currentTime,P_HLPR_Context ctx) {
    // create ImageData describing the BGR frame
    HLPR_ImageData data = {0};
    data.data = source.data;
    data.width = source.cols;
    data.height = source.rows;
    data.format = STREAM_BGR;
    data.rotation = CAMERA_ROTATION_0;
    // create DataBuffer
    P_HLPR_DataBuffer buffer = HLPR_CreateDataBuffer(&data);
    HREESULT ret = HLPR_ContextQueryStatus(ctx);
    if (ret != HResultCode::Ok) {
        qInfo()<<QString("create error");
        HLPR_ReleaseDataBuffer(buffer); // fix: buffer leaked on this path before
        return ;
    }
    // exec plate recognition
    HLPR_PlateResultList results = {0};
    HLPR_ContextUpdateStream(ctx, buffer, &results);
    if (results.plate_size <= 0) {
        HLPR_ReleaseDataBuffer(buffer);
        return;
    }
    QStringList lpResults;
    for (int i = 0; i < results.plate_size; ++i) {
        std::string type;
        if (results.plates[i].type == HLPR_PlateType::PLATE_TYPE_UNKNOWN) {
            type = "未知";
        } else {
            type = types[results.plates[i].type];
        }
        vides_data::LicensePlate newPlate;
        newPlate.time=currentTime;
        newPlate.new_color=QString::fromStdString(type);
        newPlate.new_plate=QString::fromUtf8(results.plates[i].code);
        // (x1,y1) is the top-left and (x2,y2) the bottom-right detector corner;
        // the four ParkingArea corners are derived from that axis-aligned box.
        vides_data::ParkingArea area;
        area.topLeftCornerX=results.plates[i].x1;
        area.topLeftCornerY=results.plates[i].y1;
        area.bottomLeftCornerX=results.plates[i].x1;
        area.bottomLeftCornerY=results.plates[i].y2;
        area.topRightCornerX=results.plates[i].x2;
        area.topRightCornerY=results.plates[i].y1;
        area.bottomRightCornerX=results.plates[i].x2;
        area.bottomRightCornerY=results.plates[i].y2;
        newPlate.recognition=area;
        // 'f' = fixed-point, 4 decimals; the old 'Q' was invalid and made Qt
        // warn and fall back to its default float formatting.
        QString plateResult = QString("第%1个,%2,车牌号:%3,置信度:%4,左上角点x坐标:%5,左上角点y坐标:%6,右下角点x坐标:%7,右下角点y坐标:%8")
                .arg(i + 1).arg(type.c_str()).arg(results.plates[i].code)
                .arg(results.plates[i].text_confidence).arg(results.plates[i].x1, 0, 'f', 4)
                .arg(results.plates[i].y1, 0, 'f', 4).arg(results.plates[i].x2, 0, 'f', 4)
                .arg(results.plates[i].y2, 0, 'f', 4);
        plate.plates.push_front(newPlate);
        lpResults.append(plateResult);
    }
    lpNumber =lpResults.join("\t");
    HLPR_ReleaseDataBuffer(buffer);
}
#ifndef LICENSEPLATERECOGNITION_H
#define LICENSEPLATERECOGNITION_H
#include "hyper_lpr_sdk.h"
#include "LogHandle.h"
#include "VidesData.h"
#include <QString>
#include <opencv2/opencv.hpp>
#include <QTextStream>
#include <QFile>
#include <QImage>
#include <mutex>
// Plate-type labels indexed by HLPR_PlateType (runtime strings — do not edit).
const std::vector<std::string> types =
{"蓝牌", "黄牌单层", "白牌单层", "绿牌新能源", "黑牌港澳",
 "香港单层", "香港双层", "澳门单层", "澳门双层", "黄牌双层"};
// Facade over the HyperLPR SDK. Singleton via getInstance() (Meyers static);
// the private static `instance` pointer is legacy and unused by it.
class LicensePlateRecognition{
public:
    static LicensePlateRecognition& getInstance()
    {
        static LicensePlateRecognition instance;
        return instance;
    }
    // Recognize plate numbers in one frame using a caller-provided context.
    void licensePlateNumber(const cv::Mat &source,QString & lpNumber, vides_data::requestLicensePlate &plate,
                            qint64 currentTime,P_HLPR_Context ctx);
    // Legacy variant that creates and destroys its own context per call.
    void oldLicensePlateNumber(const cv::Mat &source,const QString &modelPaths,QString & lpNumber);
    // void initHlprContext(const QString &modelPaths,const QString &carCascade,float carConfidence);
private:
    static LicensePlateRecognition* instance;
    //P_HLPR_Context ctx ;
    float carConfidence;
    std::mutex carMutex;
    LicensePlateRecognition();
    ~LicensePlateRecognition();
};
#endif // LICENSEPLATERECOGNITION_H
#ifndef LOGHANDLER_H
#define LOGHANDLER_H
#include <iostream>
#include <QDebug>
#include <QDateTime>
#include <QMutexLocker>
#include <QDir>
#include <QFile>
#include <QFileInfo>
#include <QTimer>
#include <QTextStream>
#include <QTextCodec>
// Log-size rotation factor used by checkLogFiles() — intended unit per its
// comment is megabytes; TODO confirm.
const int g_logLimitSize = 5;
// Internal state for the log handler.
struct LogHandlerPrivate {
    LogHandlerPrivate();
    ~LogHandlerPrivate();
    // Open today.log; if it was created on an earlier day, rename it to
    // yyyy-MM-dd.log and start a fresh today.log.
    void openAndBackupLogFile();
    void checkLogFiles();    // rotate the current log file when it grows too large
    void autoDeleteLog();    // delete old archived logs (currently not scheduled)
    // Qt message handler installed by LogHandler.
    static void messageHandler(QtMsgType type, const QMessageLogContext &context, const QString &msg);
    QDir logDir;                    // directory holding the log files
    QTimer renameLogFileTimer;      // periodic check for date rollover / size rotation
    QTimer flushLogFileTimer;       // periodic flush of buffered log output
    QDate logFileCreatedDate;       // creation date of the current log file
    static QFile *logFile;          // the current log file
    static QTextStream *logOut;     // stream over logFile (static to avoid per-call setup)
    static QMutex logMutex;         // guards all static logging state
};
// Public entry point: installs/uninstalls the Qt message handler that routes
// qDebug/qInfo/... output to stdout and a rotating log file.
class LogHandler {
public:
    void installMessageHandler();   // install the Qt message handler (idempotent)
    void uninstallMessageHandler(); // remove the handler and free resources
    static LogHandler& Get() {
        static LogHandler m_logHandler;
        return m_logHandler;
    }
private:
    LogHandler();
    LogHandlerPrivate *d;
};
#endif // LOGHANDLER_H
#include "LogHandle.h"
#include <qthread.h>
/************************************************************************************************************
* *
* LogHandlerPrivate *
* *
***********************************************************************************************************/
// Initialize the static members shared by all LogHandlerPrivate code paths.
QMutex LogHandlerPrivate::logMutex;
QFile* LogHandlerPrivate::logFile = nullptr;
QTextStream* LogHandlerPrivate::logOut = nullptr;
// Set up the log directory, open/rotate the log file, and start the two
// maintenance timers (rotation check and periodic flush).
LogHandlerPrivate::LogHandlerPrivate() {
    logDir.setPath("log"); // TODO: log directory under the exe; could come from config
    QString logPath = logDir.absoluteFilePath("today.log"); // path of the active log
    // ======== Determine when the log file was created ========
    // QFileInfo::created(): on most Unix systems this returns the time of the
    // last status change, which drifts at runtime — so capture the file's
    // last-modified date once, at startup.
    logFileCreatedDate = QFileInfo(logPath).lastModified().date(); // invalid date if the file does not exist
    // Open the log file; archive it first if it was created on an earlier day.
    openAndBackupLogFile();
    // Periodic rollover/size check. NOTE(review): the original comment said
    // "every ten minutes" but the interval is 2 seconds — confirm intent.
    renameLogFileTimer.setInterval(1000*2); // TODO: could come from config
    renameLogFileTimer.start();
    QObject::connect(&renameLogFileTimer, &QTimer::timeout, [this] {
        QMutexLocker locker(&LogHandlerPrivate::logMutex);
        openAndBackupLogFile(); // reopen/rotate on date change
        checkLogFiles();        // rotate when the file grows too large
        // autoDeleteLog();     // purge old archives (disabled)
    });
    // Flush buffered output regularly so recent lines appear in the file quickly.
    flushLogFileTimer.setInterval(1000); // TODO: could come from config
    flushLogFileTimer.start();
    QObject::connect(&flushLogFileTimer, &QTimer::timeout, [] {
        // qDebug() << QDateTime::currentDateTime().toString("yyyy-MM-dd hh:mm:ss"); // test: continuous writes
        QMutexLocker locker(&LogHandlerPrivate::logMutex);
        if (nullptr != logOut) {
            logOut->flush();
        }
    });
}
// Flush and close the log file, and reset the static members so a later
// reinstall starts from a clean state.
LogHandlerPrivate::~LogHandlerPrivate() {
    if (nullptr != logFile) {
        logFile->flush();
        logFile->close();
        delete logOut;
        delete logFile;
        // Reset because they are static and outlive this object.
        logOut = nullptr;
        logFile = nullptr;
    }
}
// Open today.log; if it was created on an earlier day, archive it under its
// creation date (yyyy-MM-dd.log) and recreate a fresh today.log.
void LogHandlerPrivate::openAndBackupLogFile() {
    // Overall logic:
    // 1. At startup logFile is nullptr: open (Append, since the same day's
    //    file may already exist).
    // 2. If logFileCreatedDate is invalid, the file did not exist at startup;
    //    record today as its creation date.
    // 3. While running, if the file's creation date differs from today,
    //    rename it after its creation date and start a new today.log.
    // Note: today.log is always the current day's log; at midnight rollover
    // step 3 renames it using its (previous) creation date.
    // Create the log directory if it does not exist.
    if (!logDir.exists()) {
        logDir.mkpath("."); // creates intermediate directories as needed
    }
    QString logPath = logDir.absoluteFilePath("today.log"); // active log path
    // [[1]] logFile is nullptr on every fresh start
    if (logFile == nullptr) {
        logFile = new QFile(logPath);
        logOut = (logFile->open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Append)) ? new QTextStream(logFile) : nullptr;
        if (logOut != nullptr)
            logOut->setCodec("UTF-8");
        // [[2]] First creation: the recorded date is invalid, use today.
        if (logFileCreatedDate.isNull()) {
            logFileCreatedDate = QDate::currentDate();
        }
    }
    // [[3]] Date rollover: archive under the creation date, reopen fresh.
    if (logFileCreatedDate != QDate::currentDate()) {
        logFile->flush();
        logFile->close();
        delete logOut;
        delete logFile;
        QString newLogPath = logDir.absoluteFilePath(logFileCreatedDate.toString("yyyy-MM-dd.log"));
        QFile::rename(logPath, newLogPath); // archive the old file under its date
        logFile = new QFile(logPath);
        logOut = (logFile->open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Append)) ? new QTextStream(logFile) : nullptr;
        logFileCreatedDate = QDate::currentDate();
        if (logOut != nullptr)
            logOut->setCodec("UTF-8");
    }
}
// Rotate the active log file once it exceeds the size limit.
// Fixes vs. the previous code (both per its own comment): the threshold was
// 1024*g_logLimitSize = 5 KB instead of the documented 5 MB, and the archive
// name was the date-only "yyyy-MM-dd.log", which collides with the daily
// archive — QFile::rename fails when the destination exists, so a second
// same-day rotation silently failed and the file grew without bound.
void LogHandlerPrivate::checkLogFiles() {
    // If today.log exceeds g_logLimitSize MB, archive it as
    // yyyy-MM-dd_hhmmss.log and start a fresh file.
    if (logFile->size() > 1024*1024*g_logLimitSize) {
        logFile->flush();
        logFile->close();
        delete logOut;
        delete logFile;
        QString logPath = logDir.absoluteFilePath("today.log"); // active log path
        // Timestamped name so same-day rotations never clobber each other.
        QString newLogPath = logDir.absoluteFilePath(
                QDateTime::currentDateTime().toString("yyyy-MM-dd_hhmmss") + ".log");
        QFile::rename(logPath, newLogPath);
        logFile = new QFile(logPath);
        logOut = (logFile->open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Append)) ? new QTextStream(logFile) : nullptr;
        logFileCreatedDate = QDate::currentDate();
        if (logOut != nullptr)
            logOut->setCodec("UTF-8");
    }
}
// Delete archived log files older than the retention window.
// NOTE(review): the name/comment say 30 days but the code keeps only 15
// (addDays(-15)) — confirm the intended retention. Also note this is never
// called: the only call site (in the constructor) is commented out.
void LogHandlerPrivate::autoDeleteLog()
{
    QDateTime now = QDateTime::currentDateTime();
    // Cutoff for deletion (15 days ago, despite the 30-day comments).
    QDateTime dateTime1 = now.addDays(-15);
    QDateTime dateTime2;
    QString logPath = logDir.absoluteFilePath(""); // log directory path
    QDir dir(logPath);
    QStringList filename ;
    filename << "*.log"; // glob filter; more patterns may be appended
    QFileInfoList fileList = dir.entryInfoList(filename);
    foreach (QFileInfo f, fileList) {
        // Skip the active file and empty base names.
        if (f.baseName() == "" || f.baseName()=="today" )
            continue;
        // NOTE(review): base names that do not parse as yyyy-MM-dd produce an
        // invalid QDateTime, which Qt orders before any valid one — such files
        // would be deleted too. Confirm all archives use the date-only name.
        dateTime2 = QDateTime::fromString(f.baseName(), "yyyy-MM-dd");
        if (dateTime2 < dateTime1) { // older than the cutoff => delete
            dir.remove(f.absoluteFilePath());
        }
    }
}
// Qt message handler: routes every qDebug/qInfo/... message to stdout and,
// when initialized, to the shared log file. Serialized by logMutex.
void LogHandlerPrivate::messageHandler(QtMsgType type, const QMessageLogContext &context, const QString &msg) {
    QMutexLocker locker(&LogHandlerPrivate::logMutex);
    QString level;
    switch (type) {
    case QtDebugMsg:
        level = "DEBUG";
        break;
    case QtInfoMsg:
        level = "INFO ";
        break;
    case QtWarningMsg:
        level = "WARN ";
        break;
    case QtCriticalMsg:
        level = "ERROR";
        break;
    case QtFatalMsg:
        level = "FATAL";
        break;
    default:
        break;
    }
    // Standard output: on Windows std::cout expects GB2312 while msg is UTF-8,
    // so re-encode there; elsewhere the local 8-bit encoding is fine.
#if defined(Q_OS_WIN)
    QByteArray localMsg = QTextCodec::codecForName("GB2312")->fromUnicode(msg); //msg.toLocal8Bit();
#else
    QByteArray localMsg = msg.toLocal8Bit();
#endif
    std::cout << std::string(localMsg) << std::endl;
    if (nullptr == LogHandlerPrivate::logOut) {
        return; // file logging not initialized (or already torn down)
    }
    // File format: time - [Level] (file:line, function): message
    // NOTE(review): QDir::separator() is '\' on Windows while __FILE__ paths
    // often use '/', so the basename extraction may keep the full path there.
    QString fileName = context.file;
    int index = fileName.lastIndexOf(QDir::separator());
    fileName = fileName.mid(index + 1);
    (*LogHandlerPrivate::logOut) << QString("%1 - [%2] (%3:%4, %5): %6\n")
                                    .arg(QDateTime::currentDateTime().toString("yyyy-MM-dd hh:mm:ss")).arg(level)
                                    .arg(fileName).arg(context.line).arg(context.function).arg(msg);
}
/************************************************************************************************************
* *
* LogHandler *
* *
***********************************************************************************************************/
// Private constructor (singleton); state is created lazily in installMessageHandler().
LogHandler::LogHandler() : d(nullptr) {
}
// Install the custom Qt message handler (idempotent: no-op if already installed).
void LogHandler::installMessageHandler() {
    QMutexLocker locker(&LogHandlerPrivate::logMutex); // RAII lock, released on scope exit
    if (nullptr == d) {
        d = new LogHandlerPrivate();
        qInstallMessageHandler(LogHandlerPrivate::messageHandler); // register with Qt
    }
}
// Restore Qt's default message handler and release the logging state.
void LogHandler::uninstallMessageHandler() {
    QMutexLocker locker(&LogHandlerPrivate::logMutex);
    qInstallMessageHandler(nullptr);
    delete d;
    d = nullptr;
}
#include "MediaFaceImage.h"
#include "CameraHandle.h"
MediaFaceImage* MediaFaceImage::m_instance = nullptr; // lazily created singleton instance
MediaFaceImage::MediaFaceImage()
{
}
// Tears down the XSDK library; runs only if the singleton is ever deleted.
MediaFaceImage::~MediaFaceImage()
{
    XSDK_UnInit();
}
// Return the singleton, creating it on first use.
// NOTE(review): this lazy init is unsynchronized — two threads calling it
// concurrently could both allocate; confirm first call happens single-threaded.
MediaFaceImage* MediaFaceImage::getInstance()
{
    if (m_instance == nullptr) // not created yet?
    {
        m_instance = new MediaFaceImage(); // create and remember the instance
    }
    return m_instance;
}
// Return the handle->camera registry. Note this returns a copy of the map
// (the CameraHandle pointers themselves are shared, not copied).
std::map<int,CameraHandle*>MediaFaceImage::getCurrentDevice(){
    return currentDevice;
}
// Remove the registry entry for `hObject` (no-op if absent).
void MediaFaceImage::clearCurrentDevice(int hObject){
    currentDevice.erase(hObject);
}
// Register `value` under `key`, keeping any existing entry untouched.
void MediaFaceImage::setMap(int &key,CameraHandle*value){
    if (currentDevice.find(key) == currentDevice.end()) {
        currentDevice.emplace(key, value);
    }
}
// Global XSDK event callback. pUserData is the MediaFaceImage singleton;
// alarm events (EXCMD_ALARM_REQ) are dispatched to the matching
// CameraHandle::callbackFunction on the global thread pool.
static int sdkInitCallback(XSDK_HANDLE hObject, int nMsgId, int nParam1,
int nParam2, int nParam3, const char* szString, void* pObject,
int64 lParam, int nSeq, void* pUserData, void* pMsg){
    if (pUserData == nullptr) {
        qInfo() << "pUserData 为空";
        return -1;
    }
    switch (nMsgId)
    {
    case ESXSDK_ON_DEV_STATE:
    {
        // nParam1 == 6 means the device has logged in; otherwise disconnected.
        printf("ESXSDK_ON_DEV_STATE[%s]\r\n", nParam1 == 6 ? "ESTATE_DEV_Logined" : "ESTATE_DEV_NetDisConnect");
    }
        break;
    case EXSDK_DATA_FORMATE_FRAME:
        break; // frame data is ignored here
    case EXCMD_ALARM_REQ:
    {
        MediaFaceImage* mediaFaceImage = static_cast<MediaFaceImage*>(pUserData);
        // Only dispatch if the device handle is still registered.
        if(mediaFaceImage->getCurrentDevice().count(hObject)>0){
            QString qString(szString); // alarm payload (JSON text from the device)
            CameraHandle* cameraHandle= mediaFaceImage->getCurrentDevice().at(hObject);
            QThreadPool* threadPool = QThreadPool::globalInstance();
            auto taskCallBack=std::bind(&CameraHandle::callbackFunction, cameraHandle, hObject, qString);
            auto taskRunnable = new TaskRunnable(taskCallBack, hObject,cameraHandle->getChannel(), RunFunction::SdkCallbackFunction);
            // task->setAutoDelete(false); // would keep the task alive after execution
            threadPool->start(taskRunnable);
            // if (!threadPool->tryStart(task)) { // non-blocking start variant
            //     qDebug() << "thread pool full, cannot start TaskRunnable";
            // }
        }
    }
        break;
    default:
        break;
    }
    return 0;
}
// Synchronously scan the LAN for devices (up to 100) and add one heap-owned
// localDeviceStatus per hit to `devices`, keyed by serial number.
// Returns the number of devices found, or -1 when none were found.
// Fixes: RAII vector replaces the manual new[]/delete[] pair, and the
// always-true `if (nActualCount >= 0)` branch after the early return is gone.
int MediaFaceImage::SdkSearchDevicesSyn(std::map<QString, vides_data::localDeviceStatus *> &devices){
    const int nMaxCount = 100;
    std::vector<SXSDK_CONFIG_NET_COMMON> results(nMaxCount);
    memset(results.data(), 0, sizeof(SXSDK_CONFIG_NET_COMMON) * nMaxCount);
    int nActualCount = XSDK_SearchDevicesSyn(results.data(), nMaxCount);
    printf("nCount:%d\r\n", nActualCount);
    if (nActualCount <= 0)
    {
        qDebug() << QString("Search no Device");
        return -1;
    }
    for (int i = 0; i < nActualCount; i++)
    {
        qDebug() << QString("[%1][IP:%2.%3.%4.%5][SN:%6][Mac:%7]")
                    .arg(i)
                    .arg(results[i].HostIP.c[0])
                    .arg(results[i].HostIP.c[1])
                    .arg(results[i].HostIP.c[2])
                    .arg(results[i].HostIP.c[3])
                    .arg(results[i].sSn)
                    .arg(results[i].sMac);
        // Caller owns these entries.
        vides_data::localDeviceStatus *pDevice=new vides_data::localDeviceStatus();
        pDevice->sSn=QString::fromUtf8(results[i].sSn);
        pDevice->HostIP=results[i].HostIP;
        pDevice->TCPPort=results[i].TCPPort;
        pDevice->HttpPort=results[i].HttpPort;
        pDevice->UserName= QString::fromUtf8(results[i].DefaultUser,sizeof(results[i].DefaultUser));
        pDevice->password= QString::fromUtf8(results[i].DefaultPwd,sizeof(results[i].DefaultPwd));
        devices.insert(std::make_pair(pDevice->sSn, pDevice));
    }
    return nActualCount;
}
// Initialize the XSDK library with the given config/temp paths and register
// sdkInitCallback (with `this` as user data). Returns XSDK_Init's result
// (negative on failure).
int MediaFaceImage::SdkInit(QString &szConfigPath, QString &szTempPath) {
    // NOTE(review): pParam is heap-allocated and never freed. If XSDK_Init
    // copies the struct this is a one-shot leak; confirm the SDK's ownership
    // semantics before freeing it here.
    SXSDKInitParam *pParam=new SXSDKInitParam();
    pParam->nLogLevel=8;
    QByteArray && byConfigPath=szConfigPath.toLocal8Bit();
    strcpy(pParam->szConfigPath, byConfigPath.data());
    QByteArray && byTempPath = szTempPath.toLocal8Bit();
    strcpy(pParam->szTempPath, byTempPath.data());
    SMsgReceiver sms(nullptr,sdkInitCallback,this);
    pParam->mainMsgCallBack=sms;
    int initResult= XSDK_Init(pParam);
    if(initResult<0){
        qInfo() << "sdk 初始化失败";
        return initResult;
    }
    return initResult;
}
// Split a device image packet into its JPEG payload and trailing JSON.
// Layout (as read by this code): bytes 0-3 little-endian JPEG length, JPEG
// data starting at offset 32, and JSON text following the final JPEG
// end-of-image marker (0xFF 0xD9).
// NOTE(review): pJpg/pJson are assumed large enough by the caller — no bounds
// are checked here, and strcpy assumes the JSON tail is NUL-terminated.
void MediaFaceImage::ParserImageData(const unsigned char* pData, int nDataLen, char* pJpg, int* nJpgLen, char* pJson)
{
    // Little-endian 32-bit JPEG length from the packet header.
    quint32 nPicLen = static_cast<quint32>(pData[0]) | (static_cast<quint32>(pData[1]) << 8) | (static_cast<quint32>(pData[2]) << 16) | (static_cast<quint32>(pData[3]) << 24);
    qDebug() << "nPicLen =" << nPicLen;
    *nJpgLen = static_cast<int>(nPicLen);
    memcpy(pJpg, (pData + 32), *nJpgLen);
    // Scan backwards for the last JPEG end marker; JSON follows it.
    const unsigned char* pInfoHead = nullptr;
    for (int i = nDataLen - 2; i > -1; i--)
    {
        if (pData[i] == 0xff && pData[i + 1] == 0xd9)
        {
            pInfoHead = pData + i;
            break;
        }
    }
    if (pInfoHead != nullptr)
    {
        strcpy(pJson, reinterpret_cast<const char*>(pInfoHead + 2));
    }
}
// Append nLength bytes from pData to pFileName (created if absent).
// Returns 0 on success, -1 open failure, -2 bad arguments, -3 short write.
// Fix: cast nLength for the fwrite-result comparison (signed/unsigned
// mismatch; safe because nLength > 0 is guaranteed above).
int MediaFaceImage::AbFile(const char* pFileName, const void* pData, int nLength) {
    if (pData == NULL || nLength <= 0) {
        return -2; // nothing to write / invalid input
    }
    FILE* fp = fopen(pFileName, "ab+");
    if (fp == NULL) {
        return -1; // could not open the file
    }
    size_t written = fwrite(pData, 1, nLength, fp);
    if (written != static_cast<size_t>(nLength)) {
        // Short write: close and report.
        fclose(fp);
        return -3;
    }
    fflush(fp); // push buffered data to the OS before closing
    fclose(fp);
    return 0;
}
// Write nLength bytes from pData to pFileName, truncating any existing file.
// Returns 0 on success, -1 open failure, -2 bad arguments, -3 short write.
// Fix: cast nLength for the fwrite-result comparison (signed/unsigned
// mismatch; safe because nLength > 0 is guaranteed above). Mirrors AbFile.
int MediaFaceImage::ToFile(const char* pFileName, const void* pData, int nLength)
{
    if (pData == NULL || nLength <= 0)
    {
        return -2; // nothing to write / invalid input
    }
    FILE* fp = fopen(pFileName, "wb");
    if (fp == NULL)
    {
        return -1; // could not open the file
    }
    size_t written = fwrite(pData, 1, nLength, fp);
    if (written != static_cast<size_t>(nLength))
    {
        // Short write: close and report.
        fclose(fp);
        return -3;
    }
    fflush(fp); // push buffered data to the OS before closing
    fclose(fp);
    return 0;
}
// Synchronously snapshot channel `nChannel` of device `hMedia` and decode the
// JPEG bytes into `image`. Returns the snapshot byte count, or -1 on failure.
// Improvement: snap straight into the vector handed to cv::imdecode, removing
// the previous extra 2 MB heap buffer and memcpy.
int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat &image) {
    const int BufferSize = 1024 * 1024 * 2; // snapshot scratch buffer (2 MB)
    std::vector<uchar> buffer(BufferSize);
    int pInOutBufferSize = 0;
    int ret = XSDK_DevSnapSyn(hMedia, nChannel, "", buffer.data(), &pInOutBufferSize);
    if (ret < 0 || pInOutBufferSize <= 0) {
        qInfo() << "同步设备端抓图失败";
        return -1;
    }
    buffer.resize(pInOutBufferSize); // shrink to the bytes actually written
    image = cv::imdecode(buffer, cv::IMREAD_UNCHANGED);
    return pInOutBufferSize;
}
//int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat &image)
//{
// // static const int BufferSize = 1024 * 1024 * 2;
// // static unsigned char pOutBuffer[BufferSize];
// const int BufferSize = 1024 * 1024 * 2;
// unsigned char* pOutBuffer = new unsigned char[BufferSize];
// int pInOutBufferSize = 0;
// int ret = XSDK_DevSnapSyn(hMedia, nChannel, "", pOutBuffer, &pInOutBufferSize);
// if (ret < 0 || pInOutBufferSize<=0 ) {
// qInfo() << "同步设备端抓图失败";
// if (pOutBuffer)
// {
// delete[]pOutBuffer;
// pOutBuffer = nullptr;;
// }
// return -1;
// }
// std::vector<uchar> buffer(pInOutBufferSize);
// memcpy(buffer.data(), pOutBuffer, pInOutBufferSize);
// image =std::move(cv::imdecode(buffer, cv::IMREAD_UNCHANGED));;
// if (pOutBuffer)
// {
// delete[]pOutBuffer;
// pOutBuffer = nullptr;;
// }
// return pInOutBufferSize;
//}
// Synchronously snapshot channel `nChannel` of device `hMedia` into `buffer`.
// Returns the number of bytes written to `buffer`, or -1 on failure.
// Fix: the previous function-local `static` 2 MB scratch buffer was shared by
// every caller — concurrent calls (alarm callbacks run on a thread pool, see
// sdkInitCallback) could corrupt each other's snapshots. A per-call buffer
// removes that shared mutable state.
int MediaFaceImage::CameraImage(XSDK_HANDLE hMedia,int nChannel,std::vector<uchar> &buffer){
    const int BufferSize = 1024 * 1024 * 2; // 2 MB snapshot scratch buffer
    std::vector<uchar> snap(BufferSize);
    int pInOutBufferSize = 0; // receives the actual snapshot size
    int ret = XSDK_DevSnapSyn(hMedia, nChannel, "", snap.data(), &pInOutBufferSize);
    if (ret < 0 || pInOutBufferSize <= 0) {
        qInfo() << "同步设备端抓图失败";
        return -1; // snapshot failed
    }
    // Hand exactly the filled bytes to the caller without an extra copy.
    snap.resize(pInOutBufferSize);
    buffer = std::move(snap);
    return pInOutBufferSize;
}
#ifndef MEDIAFACEIMAGE_H
#define MEDIAFACEIMAGE_H
#include "XSDKPublic.h"
#include "XNetSDKSyn.h"
#include "XNetSDKDefine.h"
#include "VidesData.h"
#include "Common.h"
#include "TaskRunnable.h"
#include <memory>
#include <map>
#include <QDebug>
#include <QThreadPool>
#include <opencv2/opencv.hpp>
class CameraHandle;
// Facade over the XSDK device library: init/search, synchronous snapshots,
// raw-packet parsing, and the handle->CameraHandle registry used by the
// global alarm callback.
class MediaFaceImage
{
public:
    static MediaFaceImage* getInstance(); // singleton accessor (lazy, unsynchronized)
    // Split a device image packet into JPEG payload and trailing JSON text.
    void ParserImageData(const unsigned char* pData, int nDataLen, char* pJpg, int* nJpgLen, char* pJson);
    // Snapshot a channel and decode it to a cv::Mat; returns byte count or -1.
    int FaceImageCallBack(XSDK_HANDLE hMedia,int nChannel,cv::Mat &image);
    // Snapshot a channel into a raw byte vector; returns byte count or -1.
    int CameraImage(XSDK_HANDLE hMedia,int nChannel,std::vector<uchar> &buffer);
    int ToFile(const char* pFileName, const void* pData, int nLenght);  // overwrite file with bytes
    int AbFile(const char* pFileName, const void* pData, int nLenght);  // append bytes to file
    // LAN device discovery; fills `devices` keyed by serial number.
    int SdkSearchDevicesSyn(std::map< QString,vides_data::localDeviceStatus*>& devices);
    int SdkInit(QString &szConfigPath, QString &szTempPath);
    std::map<int,CameraHandle*>getCurrentDevice(); // returns a copy of the registry
    void clearCurrentDevice(int hObject);
    void setMap(int &key,CameraHandle*value);
private:
    MediaFaceImage();   // private: singleton
    ~MediaFaceImage();  // private: singleton
    std::map<int,CameraHandle*>currentDevice; // device handle -> camera wrapper
    static MediaFaceImage* m_instance;        // the singleton instance
};
#endif // MEDIAFACEIMAGE_H
#ifndef MYWRAPPER_H
#define MYWRAPPER_H
// Pimpl wrapper that forwards SDK callback arguments to a hidden
// implementation class, keeping SDK headers out of this header.
class MyWrapper {
public:
MyWrapper();
~MyWrapper();
// Forwards one SDK event to the implementation. The parameter list mirrors
// the XSDK message-callback signature (handle, message id, params, payload,
// sequence number, user data, raw message).
void doSomething(int hObject,int nMsgId, int nParam1, int nParam2, int nParam3, const char* szString, void* pObject, long long lParam, int nSeq, void* pUserData, void* pMsg);
private:
class Impl;    // defined in the .cpp
Impl* m_pImpl; // owned raw pointer — NOTE(review): consider std::unique_ptr
};
#endif // MYWRAPPER_H
#include "NewHttpService.h"
// Ctor/dtor intentionally empty: m_httpClient and httpUrl are
// default-constructed and the base URL is injected later via setHttpUrl().
NewHttpService::NewHttpService() {
}
NewHttpService::~NewHttpService() {
}
// POST the device heartbeat (sn/type/status) to /api/v1.0/device/ping.
// Returns a caller-owned response envelope; code==2 + OPERATION_FAILED on
// transport failure.
vides_data::response* NewHttpService::httpPostDeviceStatus(vides_data::requestDeviceStatus & deviceStatus) {
    // Fix: build the URL in a local. The old code did httpUrl.append(...),
    // mutating the member, so every call grew the URL by one more
    // "/api/v1.0/device/ping" segment and the second heartbeat already hit a
    // non-existent endpoint.
    QString requestUrl = httpUrl + "/api/v1.0/device/ping";
    // JSON body: serial number, device type and status.
    QJsonObject json;
    json.insert("sn",deviceStatus.sSn);
    json.insert("type",deviceStatus.type);
    json.insert("status",deviceStatus.status);
    QByteArray bytearr = QJsonDocument(json).toJson(QJsonDocument::Compact);
    // Caller owns the returned object.
    vides_data::response *resp=new vides_data::response();
    QNetworkRequest request;
    request.setUrl(QUrl(requestUrl));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    if(m_httpClient.post(request,bytearr)){
        // Parse {"code":..,"msg":..} out of the reply body.
        QByteArray byte=m_httpClient.text().toUtf8();
        QJsonObject maps = QJsonDocument::fromJson(byte).object();
        QVariantMap map = maps.toVariantMap();
        resp->code=map["code"].toInt();
        resp->msg=map["msg"].toString();
    }else{
        qDebug()<<m_httpClient.errorCode();
        resp->code=2;
        resp->msg=OPERATION_FAILED;
    }
    return resp;
}
// GET /api/v1.0/device/all?sn=<serialNumber>. Appends one new'd
// responseDeviceStatus per device to `datas` (caller owns them) and returns a
// caller-owned response envelope.
vides_data::response *NewHttpService::httpFindCameras(QString &serialNumber,std::list<vides_data::responseDeviceStatus*>&datas) {
    // Fix: build the URL in a local instead of appending onto the member
    // httpUrl — the old code grew the URL by another "/api/v1.0/device/all"
    // on every call.
    QString requestUrl = httpUrl + "/api/v1.0/device/all";
    vides_data::response *resp=new vides_data::response();
    QUrlQuery query;
    query.addQueryItem("sn",serialNumber);
    QNetworkRequest request;
    QUrl url(requestUrl);
    url.setQuery(query);
    request.setUrl(url);
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    if(m_httpClient.get(request)){
        QByteArray byte=m_httpClient.text().toUtf8();
        QJsonObject maps = QJsonDocument::fromJson(byte).object();
        QVariantMap map = maps.toVariantMap();
        resp->code=map["code"].toInt();
        // "data" is an array of device descriptors.
        QJsonArray dataArray = map["data"].toJsonArray();
        for (const QJsonValue& value : dataArray) {
            vides_data::responseDeviceStatus *res=new vides_data::responseDeviceStatus();
            QJsonObject dataObject = value.toObject();
            res->sSn=dataObject["sn"].toString();
            res->type=dataObject["type"].toInt();
            res->merchant_id= dataObject["merchant_id"].toInt();
            datas.push_back(res);
        }
        resp->msg=map["msg"].toString();
    }else{
        qDebug()<<m_httpClient.errorCode();
        resp->code=2;
        resp->msg=OPERATION_FAILED;
    }
    return resp;
}
#ifndef NEWHTTPSERVICE_H
#define NEWHTTPSERVICE_H
#include <QObject>
#include "HttpClient.h"
#include "VidesData.h"
#include "Common.h"
#include <list>
#include <QJsonObject>
#include <QJsonArray>
#include <QJsonDocument>
#include <QUrlQuery>
// NOTE(review): header-scope consts get one copy per translation unit; with
// C++17 these could be `inline`. Kept as-is for compatibility.
const QString OPERATION_FAILED = "操作失败";
const QString OPERATION_SUCCESS = "操作成功";
// Cloud-service client for the device heartbeat and device-listing endpoints.
class NewHttpService : public QObject {
Q_OBJECT
public:
// Meyers singleton — initialization is thread-safe since C++11.
static NewHttpService& getInstance() {
static NewHttpService instance;
return instance;
}
// POST sn/type/status to /api/v1.0/device/ping; caller owns the result.
vides_data::response* httpPostDeviceStatus(vides_data::requestDeviceStatus & deviceStatus);
// GET /api/v1.0/device/all; appends caller-owned entries to `datas`.
vides_data::response *httpFindCameras(QString &serialNumber,std::list<vides_data::responseDeviceStatus*>&datas);
void setHttpUrl(const QString& url); // base URL used by all requests
public:
// NOTE(review): a public constructor defeats the singleton guarantee —
// consider making it private and deleting copy/assignment.
NewHttpService();
~NewHttpService();
HttpClient m_httpClient; // underlying blocking HTTP client
QString httpUrl;         // base service URL set via setHttpUrl()
};
#endif // NEWHTTPSERVICE_H
#include "ParkingSpaceInfo.h"
// Seed the slot with the plate currently recognized in it.
ParkingSpaceInfo::ParkingSpaceInfo(RecognizedInfo &currentPlate)
:currentPlate(currentPlate)
{
}
// Default slot: empty queue, default-constructed current plate.
// NOTE(review): spaceIndex stays uninitialized until setSpaceIndex().
ParkingSpaceInfo::ParkingSpaceInfo(){
}
ParkingSpaceInfo::~ParkingSpaceInfo(){
}
// Append one recognition result to the slot's history queue (thread-safe).
void ParkingSpaceInfo::addQueue(RecognizedInfo &info){
QMutexLocker locker(&queueMutex);
queuels.enqueue(info);
}
// Drop the oldest queued result, if any (thread-safe).
void ParkingSpaceInfo::removeQueue(){
QMutexLocker locker(&queueMutex);
if (!queuels.isEmpty()) {
queuels.dequeue();
}
}
// Purge queued entries whose plate string is empty, but only once the queue
// has grown past 3 elements (keeps a minimum of history for debouncing).
void ParkingSpaceInfo::removeNoQueue() {
    QMutexLocker locker(&queueMutex);
    // size() > 3 already implies the queue is non-empty, so the extra
    // isEmpty() test in the old code was redundant.
    if (queuels.size() > 3) {
        // Iterate backwards so removals do not shift indices we have not
        // visited yet.
        for (int i = queuels.size() - 1; i >= 0; --i) {
            if (queuels[i].getLicensePlate().isEmpty()) {
                queuels.removeAt(i);
            }
        }
    }
}
// NOTE(review): returns a reference without holding queueMutex — callers must
// not race with the mutators above.
QQueue<RecognizedInfo> &ParkingSpaceInfo::getQueue(){
return queuels;
}
// Set the slot polygon (thread-safe).
void ParkingSpaceInfo::setArea(vides_data::ParkingArea &a){
QMutexLocker locker(&queueMutex);
this->area=a;
}
// NOTE(review): unsynchronized reference — see getQueue().
vides_data::ParkingArea& ParkingSpaceInfo::getArea(){
return area;
}
// NOTE(review): unsynchronized reference — see getQueue().
RecognizedInfo& ParkingSpaceInfo::getCurrentPlate(){
return currentPlate;
}
// Replace the plate currently assigned to the slot (thread-safe).
void ParkingSpaceInfo::setCurrentPlate(RecognizedInfo &current){
QMutexLocker locker(&queueMutex);
this->currentPlate=current;
}
// Slot index within the camera view (unsynchronized read).
int ParkingSpaceInfo::getSpaceIndex(){
return spaceIndex;
}
void ParkingSpaceInfo::setSpaceIndex(int spaceIndex){
QMutexLocker locker(&queueMutex);
this->spaceIndex=spaceIndex;
}
#ifndef PARKINGSPACEINFO_H
#define PARKINGSPACEINFO_H
#include "VidesData.h"
#include "RecognitionInfo.h"
#include <QMutex>
#include <QQueue>
// One parking slot: its polygon area, the plate currently occupying it, and a
// short history queue of recognition results used to debounce decisions.
class ParkingSpaceInfo {
public:
ParkingSpaceInfo(RecognizedInfo & currentPlate); // seed with the current plate
ParkingSpaceInfo();
~ParkingSpaceInfo();
RecognizedInfo& getCurrentPlate();  // NOTE: unsynchronized reference
void setCurrentPlate(RecognizedInfo & current);
void addQueue(RecognizedInfo &info); // append one recognition result
void removeQueue();                  // drop the oldest entry
void removeNoQueue();                // purge empty-plate entries once size > 3
QQueue<RecognizedInfo> &getQueue();  // NOTE: unsynchronized reference
void setArea(vides_data::ParkingArea &a);
vides_data::ParkingArea &getArea();  // NOTE: unsynchronized reference
int getSpaceIndex();                 // slot index within the camera view
void setSpaceIndex(int spaceIndex);
private:
QQueue<RecognizedInfo> queuels; // recent recognition results
RecognizedInfo currentPlate;    // plate currently assigned to this slot
vides_data::ParkingArea area;   // slot polygon in image coordinates
int spaceIndex;                 // NOTE(review): uninitialized until setSpaceIndex()
QMutex queueMutex;              // guards the mutators above
};
#endif // PARKINGSPACEINFO_H
#ifndef QTQtHttpClient_H
#define QTQtHttpClient_H
#include <functional>
#include <QMap>
#include <QVariant>
#include <QStringList>
#include <QNetworkReply>
#include <QNetworkRequest>
#include <QNetworkAccessManager>
class QtHttpClientPrivate;
/**
 * Thin fluent wrapper around QNetworkAccessManager that simplifies GET, POST,
 * PUT, DELETE, upload and download.
 *
 * Before executing a request, configure it with:
 *   1. header()/headers() — request headers
 *   2. param()/params()   — form parameters (also query parameters for GET)
 *   3. json()             — raw request body (Content-Type: application/json)
 *   4. success()          — callback on success
 *   5. fail()             — callback on failure
 *   6. complete()         — callback that always runs when the request ends
 * All callbacks are optional. Then call get(), post(), put(), remove(),
 * download() or upload().
 *
 * By default each QtHttpClient creates its own QNetworkAccessManager; pass a
 * shared one via manager() to reuse it (it is never deleted by QtHttpClient).
 * Call debug(true) to log the URL and parameters of each request.
 */
class QtHttpClient {
public:
    QtHttpClient(const QString &url);
    ~QtHttpClient();
    void stop2();
    /**
     * Use a caller-supplied QNetworkAccessManager instead of the internal one.
     * Each manager owns a thread, so sharing one saves resources under
     * frequent requests; a shared manager is NOT deleted by QtHttpClient.
     * @param manager the manager that will execute the request
     * @return reference to this client, for chaining
     */
    QtHttpClient& manager(QNetworkAccessManager *manager);
    /**
     * Enable/disable debug mode (logs URL and parameters when executing).
     * @return reference to this client, for chaining
     */
    QtHttpClient& debug(bool debug);
    /**
     * Add one request parameter; may be called repeatedly.
     * @return reference to this client, for chaining
     */
    QtHttpClient& param(const QString &name, const QVariant &value);
    /**
     * Add several request parameters at once, e.g. {{"name", 1}, {"box", 2}}.
     * @return reference to this client, for chaining
     */
    QtHttpClient& params(const QMap<QString, QVariant> &ps);
    /**
     * Set the raw request body, e.g. "{\"name\": \"Alice\"}". Usually JSON
     * (hence the name), but any string is accepted.
     * @return reference to this client, for chaining
     */
    QtHttpClient& json(const QString &json);
    /**
     * Add one request header.
     * @return reference to this client, for chaining
     */
    QtHttpClient& header(const QString &name, const QString &value);
    /**
     * Add several request headers at once.
     * @return reference to this client, for chaining
     */
    QtHttpClient& headers(const QMap<QString, QString> nameValues);
    /**
     * Register the success callback; its argument is the response text.
     * @return reference to this client, for chaining
     */
    QtHttpClient& success(std::function<void(const QString &)> successHandler);
    /**
     * Register the failure callback; arguments are the failure reason and the
     * HTTP status code.
     * @return reference to this client, for chaining
     */
    QtHttpClient& fail(std::function<void(const QString &, int)> failHandler);
    /**
     * Register the completion callback; runs whether the request succeeded or
     * failed.
     * @return reference to this client, for chaining
     */
    QtHttpClient& complete(std::function<void()> completeHandler);
    /**
     * Set the charset used to decode the response (default UTF-8).
     * @return reference to this client, for chaining
     */
    QtHttpClient& charset(const QString &cs);
    /** Execute a GET request. */
    void get();
    /** Execute a POST request. */
    void post();
    /** Execute a PUT request. */
    void put();
    /**
     * Execute a DELETE request ("delete" is a C++ keyword, hence "remove").
     * Note: Qt's DELETE request does not carry parameters — see
     * QNetworkAccessManager::deleteResource(const QNetworkRequest &).
     */
    void remove();
    /**
     * Download via GET, saving the body to savePath.
     * @param savePath destination file path
     */
    void download(const QString &savePath);
    /**
     * Upload a single file via POST; the server-side parameter name is "file".
     * @param path path of the file to upload
     */
    void upload(const QString &path);
    /**
     * Upload already-read file content via POST; the server-side parameter
     * name is "file".
     * @param data the file content
     */
    void upload(const QByteArray &data);
    /**
     * Upload several files via POST; the server-side parameter name is
     * "files".
     * @param paths paths of the files to upload
     */
    void upload(const QStringList &paths);
private:
    QtHttpClientPrivate *d;
}; // fix: the class declaration was missing its closing ';' and did not compile
#endif // QTQtHttpClient_H
#ifndef RECOGNITIONINFO_H
#define RECOGNITIONINFO_H
#include <QString>
#include <map>
// Value type holding one license-plate recognition result: the plate text,
// the timestamp of recognition and the plate color.
class RecognizedInfo {
public:
RecognizedInfo(const QString& plate, qint64 time, QString color);
RecognizedInfo();
~RecognizedInfo();
QString getLicensePlate();
qint64 getRecognizeTime() ;
QString getColor();
void setLicensePlate(const QString& plate);
void setRecognizeTime(qint64 time);
void setColor(QString &color);
private:
QString licensePlate; // plate number text
qint64 recognizeTime; // recognition timestamp
                      // NOTE(review): uninitialized by the default ctor
QString color;        // plate color
};
#endif // RECOGNITIONINFO_H
#include "RecognitionInfo.h"
// Default result: empty plate/color. NOTE(review): recognizeTime is left
// uninitialized here.
RecognizedInfo::RecognizedInfo(){
}
RecognizedInfo::RecognizedInfo(const QString& plate, qint64 time,
QString color)
: licensePlate(plate),
recognizeTime(time),
color(color)
{
}
// Getters
QString RecognizedInfo::getLicensePlate() {
return licensePlate;
}
qint64 RecognizedInfo::getRecognizeTime() {
return recognizeTime;
}
QString RecognizedInfo::getColor(){
return color;
}
// Setters
void RecognizedInfo::setLicensePlate(const QString& plate) {
this->licensePlate = plate;
}
void RecognizedInfo::setRecognizeTime(qint64 time) {
this->recognizeTime = time;
}
void RecognizedInfo::setColor(QString &color){
this->color=color;
}
RecognizedInfo::~RecognizedInfo(){
}
File added
_00_fdet_160
_01_lmk
_02_pose_fp16
_03_extract
_04_refine_net
_05_mask
_06_msafa27
_07_pose_q_fp16
#ifndef TASK_H
#define TASK_H
// Empty placeholder type. NOTE(review): nothing in the visible code uses it —
// candidate for removal or for growing into a real task abstraction.
class Task
{
public:
Task() {}
~Task(){}
};
#endif // TASK_H
#include "TaskRunnable.h"
// Build a pool task that will invoke `newTask` in the mode selected by `func`.
TaskRunnable::TaskRunnable(std::function<void()> newTask, int hDevice, int channel, RunFunction func)
    : m_hDevice(hDevice), m_channel(channel), runFunction(func) {
    // Store the callable in the slot matching the requested run mode; the two
    // modes are mutually exclusive, so an else-if chain suffices.
    if (runFunction == SdkDevSnapSyn) {
        devSnapSyn = newTask;
    } else if (runFunction == SdkCallbackFunction) {
        callbackFunction = newTask;
    }
    // Let the thread pool delete this runnable once run() returns.
    setAutoDelete(true);
}
TaskRunnable::~TaskRunnable(){
// Debug-only destruction counter left in from development.
// NOTE(review): `static int i` is not synchronized — pool threads may destroy
// runnables concurrently, so the printed count can be wrong; consider
// removing this or using std::atomic<int>.
static int i=0;
printf("TaskRunnable被析构%d次\n", ++i);
}
// Simple field setters; none of them are synchronized, so they are expected
// to be called before the runnable is handed to the pool.
void TaskRunnable::setString(const QString& str){
this->szString=str;
}
void TaskRunnable::setHdevice(const int& hDevice){
this->m_hDevice=hDevice;
}
void TaskRunnable::setChannel(const int& channel){
this->m_channel=channel;
}
// Selects which stored functor run() will invoke.
void TaskRunnable::setRunFunction(RunFunction func) {
this->runFunction = func;
}
void TaskRunnable::setDevSnapSyn(const DevSnapSyn& function) {
this->devSnapSyn = function;
}
void TaskRunnable::setCallbackFunction(const CallbackFunction& function) {
this->callbackFunction = function;
}
// Pool entry point: dispatch to the functor selected at construction time.
// Exceptions are swallowed here so a failing task cannot kill a pool thread.
void TaskRunnable::run() {
    try {
        // Guard against an empty std::function: invoking one throws
        // std::bad_function_call, which the old code only surfaced as a
        // generic exception log instead of naming the real problem.
        if (runFunction == SdkDevSnapSyn) {
            if (devSnapSyn) {
                devSnapSyn();
            } else {
                qDebug() << "devSnapSyn 未设置,任务被跳过";
            }
        } else if (runFunction == SdkCallbackFunction) {
            if (callbackFunction) {
                callbackFunction();
            } else {
                qDebug() << "callbackFunction 未设置,任务被跳过";
            }
        }
    } catch (const std::exception& e) {
        qDebug() << "在任务运行过程中发生异常:" << e.what();
    } catch (...) {
        qDebug() << "在任务运行过程中发生未知异常";
    }
}
#ifndef TASKRUNNABLE_H
#define TASKRUNNABLE_H
#include <QMutex>
#include <QMutexLocker>
#include <QRunnable>
#include <QString>
#include <functional>
#include <QDebug>
// Selects which stored functor TaskRunnable::run() invokes.
enum RunFunction {
SdkDevSnapSyn,
SdkCallbackFunction
};
// QThreadPool task wrapping one of two std::function callbacks (device
// snapshot vs. SDK callback). Auto-deletes after run().
class TaskRunnable : public QRunnable {
public:
typedef std::function<void()> DevSnapSyn;
typedef std::function<void()> CallbackFunction;
~TaskRunnable();
// `newTask` is stored in the slot selected by `function`.
TaskRunnable(std::function<void()> newTask, int hDevice, int channel, RunFunction function);
void setString(const QString& str);
void setHdevice(const int& hDevice);
void setChannel(const int& channel);
void setRunFunction(RunFunction func);
void setDevSnapSyn(const DevSnapSyn& function);
void setCallbackFunction(const CallbackFunction& function);
void run() override; // pool entry point
private:
DevSnapSyn devSnapSyn;             // invoked when runFunction == SdkDevSnapSyn
CallbackFunction callbackFunction; // invoked when runFunction == SdkCallbackFunction
int m_hDevice;                     // device handle (informational)
int m_channel;                     // channel number (informational)
QString szString;
RunFunction runFunction;           // which functor run() dispatches to
QMutex mutex;                      // NOTE(review): declared but never locked
};
#endif // TASKRUNNABLE_H
#include "ThreadSafeQueue.h"
// Construct an empty queue that holds at most `maxCount` elements.
// NOTE(review): template member definitions in a .cpp are only usable from
// this translation unit — other users need them visible in the header.
template <typename T>
ThreadSafeQueue<T>::ThreadSafeQueue(int maxCount)
: maxCount_(maxCount)
{
}
// Append `value` if the queue is below capacity; returns false when full.
template <typename T>
bool ThreadSafeQueue<T>::push(const T& value) {
    std::lock_guard<std::mutex> lock(mutex_);
    // Fix: compare against an unsigned capacity. queue_.size() is size_t, so
    // the old signed/unsigned comparison turned a non-positive maxCount_ into
    // a huge unsigned bound and accepted unbounded pushes.
    if (maxCount_ > 0 && queue_.size() < static_cast<std::size_t>(maxCount_)) {
        queue_.push_back(value);
        return true;
    }
    return false; // full, or capacity not positive
}
// Remove the oldest element into `value`; returns false when empty.
template <typename T>
bool ThreadSafeQueue<T>::pop(T& value) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (queue_.empty()) {
        return false;
    }
    // FIFO fix: the old code read front() but then called pop_back(), which
    // discarded the *newest* element and left the one just returned sitting
    // in the queue. Remove the element that was actually read.
    value = std::move(queue_.front());
    queue_.erase(queue_.begin());
    return true;
}
// Const observers. NOTE(review): they lock mutex_ inside const member
// functions, which only compiles if the header declares the mutex `mutable`.
template <typename T>
bool ThreadSafeQueue<T>::empty() const {
std::lock_guard<std::mutex> lock(mutex_);
return queue_.empty();
}
// True once the element count has reached the capacity.
template <typename T>
bool ThreadSafeQueue<T>::full() const {
std::lock_guard<std::mutex> lock(mutex_);
return queue_.size() >= maxCount_;
}
// Current element count.
template <typename T>
int ThreadSafeQueue<T>::size() const {
std::lock_guard<std::mutex> lock(mutex_);
return queue_.size();
}
#ifndef THREADSAFEQUEUE_H
#define THREADSAFEQUEUE_H
#include <vector>
#include <mutex>
// Bounded, mutex-protected FIFO queue backed by std::vector.
// NOTE(review): the member definitions live in ThreadSafeQueue.cpp — template
// definitions must be visible at the point of instantiation, so only that
// translation unit can instantiate this type unless they move into the header.
template <typename T>
class ThreadSafeQueue {
public:
    ThreadSafeQueue(int maxCount); // capacity of the queue
    bool push(const T& value);     // false when the queue is full
    bool pop(T& value);            // false when the queue is empty
    bool empty() const;
    bool full() const;
    int size() const;
private:
    // Fix: `mutable` is required — empty()/full()/size() are const member
    // functions that lock this mutex, which does not compile on a non-mutable
    // member.
    mutable std::mutex mutex_;
    std::vector<T> queue_;
    int maxCount_;                 // maximum element count
};
#endif // THREADSAFEQUEUE_H
#ifndef VIDESDATA_H
#define VIDESDATA_H
#include "XNetSDKDefine.h"
#include<QString>
#include <QProcess>
#include <QDate>
#include <QProcess>
#include <QRegularExpression>
#include <QFile>
#include <QTextStream>
#include <QByteArray>
#include <QNetworkInterface>
#include <list>
// Shared plain-data structs and small host-probing helpers used across the
// camera / cloud-service code.
// NOTE(review): most structs below have empty constructors, so their POD
// members (ints, floats, pointers) start uninitialized — callers must set
// every field before use.
namespace vides_data{
constexpr const char *HEADER_TYPE_KAY="Content-Type";
constexpr const char *HEADER_TYPE_VALUE="application/json";
constexpr const char *PROFLIE_TEST= "test";
// Generic HTTP reply envelope: status code, optional payload, message.
struct response
{
int code;
void* data;
QString msg;
response() {}
};
// Heartbeat request body for /device/ping.
struct requestDeviceStatus
{
QString sSn;
int8_t type;
int8_t status;
QString ip_addr;
requestDeviceStatus() {}
};
// Temporary STS credentials for object-storage uploads.
struct responseStsCredentials{
QString access_key_id;
QString access_key_secret;
QString bucket;
QString endpoint;
QString expiration;
QString security_token;
};
// GB28181 SIP registration parameters pushed by the cloud.
struct responseGb28181 {
QString sip_ip;
int sip_port;
QString serial;
QString realm;
QString username;
QString password;
int register_validity;
int heartbeat_interval;
QString device_id;
QString channel_id;
};
// Quadrilateral area as four corner points (image coordinates).
struct responseArea {
float bottom_right_corner_x;
float bottom_right_corner_y;
float bottom_left_corner_x;
float bottom_left_corner_y;
float top_left_corner_x;
float top_left_corner_y;
float top_right_corner_x;
float top_right_corner_y;
};
// One device entry from /device/all, with its configured areas.
struct responseDeviceStatus
{
QString sSn;
int8_t type;
int8_t merchant_id;
std::list<responseArea>areas;
responseDeviceStatus() {}
};
// Device list plus the STS credentials that accompany it.
struct responseDeviceData{
std::list<responseDeviceStatus> list;
responseStsCredentials sts_credentials;
};
// Device found by LAN discovery, with its connection endpoints.
struct localDeviceStatus
{
QString sSn;
int8_t type;
int8_t merchant_id;
SXSDK_IPAddress HostIP;
int HttpPort;
int TCPPort;
int ChannelNum;
QString UserName;
QString password;
localDeviceStatus() {}
};
// Face-recognition event uploaded to the cloud.
struct requestFaceReconition
{
QString id;
QByteArray img;
QString sn;
qint64 time;
responseArea area;
requestFaceReconition() {}
};
// One matched face: identity plus its bounding box.
struct faceRecognitionResult
{
QString id;
uint32_t x;
uint32_t y;
uint32_t width;
uint32_t height;
faceRecognitionResult() {}
};
// Face entry advertised by the cloud (id + image URL).
struct responseFaceReconition
{
QString id;
QString img;
responseFaceReconition() {}
};
// Parking-slot polygon as four corner points (image coordinates).
struct ParkingArea
{
float topLeftCornerX;
float topLeftCornerY;
float bottomLeftCornerX;
float bottomLeftCornerY;
float bottomRightCornerX;
float bottomRightCornerY;
float topRightCornerX;
float topRightCornerY;
ParkingArea() {}
};
// One plate-recognition event: slot area, plate text/color, snapshot, time.
struct LicensePlate
{
ParkingArea areaLocation;
QString new_plate;
QString new_color;
QByteArray img;
qint64 time;
ParkingArea recognition;
LicensePlate() {}
};
// Batch of plate events for one device.
struct requestLicensePlate
{
QString sn;
std::list<LicensePlate> plates;
requestLicensePlate() {}
};
// Connection parameters for one camera.
struct cameraParameters
{
QString sDevId;
int nDevPort;
QString sUserName;
QString sPassword;
int channel;
QString httpUrl;
QString sSn;
QString rtspUrl;
QString rtmpUrl;
cameraParameters() {}
};
// Device network configuration block (mirrors the SDK's NetCommon table).
struct NetWorkNetCommon {
char* GateWay; // gateway IP
char* HostIP; // host IP
char* HostName; // host name
int HttpPort; // HTTP service port
char* MAC; // MAC address
int MaxBps; // bitrate cap
char* MonMode; // monitor protocol {"TCP","UDP","MCAST",...}
int SSLPort; // SSL listen port
char* Submask; // subnet mask
int TCPMaxConn; // maximum connection count
int TCPPort; // TCP listen port
char* TransferPlan; // transfer strategy: "AutoAdapt", "Quality" (quality first), "Fluency" (bitrate first), "Transmission" (network first)
int UDPPort; // UDP listen port
bool UseHSDownLoad; // whether high-speed recording download is enabled
};
// In/out record returned by the recognition service.
struct responseRecognitionData
{
int id;
qint64 inTime;
qint64 outTime;
int recognitionType;
QString sn;
};
// Heuristic VM detection via the DMI path.
// NOTE(review): on Windows the "path" is a registry key, which QFile cannot
// see, so this always returns false there — confirm intended behavior.
inline bool isVirtualMachine()
{
QString dmiPath;
#ifdef Q_OS_WIN
dmiPath = "HKEY_LOCAL_MACHINE\\HARDWARE\\DESCRIPTION\\System";
#else
dmiPath = "/sys/devices/virtual/dmi/id/";
#endif
QFile file(dmiPath);
return file.exists();
}
// Query the OS routing table for the default gateway address.
inline QString getDefaultGateway() {
QProcess process;
QString gateway;
// Pick the platform-specific command.
#ifdef Q_OS_WIN
QString command = "ipconfig";
QStringList arguments;
arguments << "/all";
#elif defined(Q_OS_LINUX)
QString command = "ip";
QStringList arguments;
arguments << "route" << "show" << "default";
#elif defined(Q_OS_MAC)
QString command = "netstat";
QStringList arguments;
arguments << "-nr";
#endif
process.start(command, arguments);
// Block until the command finishes.
process.waitForFinished();
// Read and parse its output.
QString output(process.readAllStandardOutput());
#ifdef Q_OS_WIN
// Locate the default gateway with a regular expression.
// NOTE(review): QRegExp is deprecated in Qt 5; the English "Default Gateway"
// label also assumes an English-locale ipconfig.
QRegExp rx("Default Gateway[ .]*: (.+?)(\r\n|\n)");
if (rx.indexIn(output) != -1) {
gateway = rx.cap(1).trimmed();
}
#elif defined(Q_OS_LINUX) || defined(Q_OS_MAC)
// On Linux/Mac, split into lines and look for the "default via" entry.
QStringList lines = output.split('\n');
QRegularExpression rx("^default via (\\S+)");
foreach (const QString &line, lines) {
QRegularExpressionMatch match = rx.match(line);
if (match.hasMatch()) {
gateway = match.captured(1);
break;
}
}
#endif
return gateway;
}
// Send one ICMP echo to `address` and report success.
// NOTE(review): "-c" and the success phrase are POSIX ping conventions —
// Windows ping uses "-n" and different output, so this is Linux/Mac only.
inline bool pingAddress(const QString &address) {
QProcess process;
QString program = "ping";
QStringList arguments;
arguments << "-c" << "1" << address; // -c 1: send a single echo request
process.start(program, arguments);
process.waitForFinished();
QString output(process.readAllStandardOutput());
return output.contains("1 packets transmitted, 1 received");
}
// Read the CPU serial number from /proc/cpuinfo (Linux/ARM boards).
// NOTE(review): assumes the "Serial" line contains a ':' — if grep matches a
// line without one, split(":").at(1) would assert.
inline QString getSerialNumber() {
QProcess process;
// Pipe cpuinfo through grep so only the "Serial" line remains.
process.start("bash", QStringList() << "-c" << "cat /proc/cpuinfo | grep Serial");
process.waitForFinished(-1); // wait for the command to finish
QString output = process.readAllStandardOutput();
QString serialNumber;
if (!output.isEmpty()) {
// Output is already just the Serial line; split off the value.
serialNumber = output.split(":").at(1).trimmed();
}
return serialNumber;
}
}
#endif // VIDESDATA_H
blur.jpg

5.9 KB

This source diff could not be displayed because it is too large. You can view the blob instead.
crop.png

20.1 KB

cxk.jpg

129 KB

QT += core gui network multimedia sql concurrent
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
CONFIG += c++11
TARGET = GAMERAVIDEO
TEMPLATE = app
# The following define makes your compiler emit warnings if you use
# any Qt feature that has been marked deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
#QMAKE_LIBDIR += /usr/local/lib
#INCLUDEPATH+=/usr/local/include/opencv4
#INCLUDEPATH+=/usr/local/include/hyperface
#INCLUDEPATH+=/usr/local/include/hyper
#INCLUDEPATH+=/usr/local/include/XNetSDK
#INCLUDEPATH+=/usr/local/include/human
unix:contains(QMAKE_HOST.arch, x86_64) {
QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
}
unix:contains(QMAKE_HOST.arch, arm) {
QMAKE_LIBDIR += /usr/local/lib
}
# 根据编译器类型选择库路径和头文件路径
unix: {
# x86 架构
contains(QMAKE_HOST.arch, x86_64) {
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
}
# ARM 架构
contains(QMAKE_HOST.arch, arm) {
INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface
INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK
#INCLUDEPATH+=/usr/local/include/human
}
}
# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0
#INCLUDEPATH+=/usr/local/include/opencv4
#INCLUDEPATH+=/usr/local/include/hyperface
#INCLUDEPATH+=/usr/local/include/hyper
#QMAKE_LIBDIR += /usr/local/lib
#OPENCV_LIBS=-L /usr/local/lib -lopencv_core -lopencv_highgui -lopencv_videoio -lopencv_imgproc -lHyperFace -lopencv_imgcodecs
LIBS += -lopencv_core \
-lopencv_highgui \
-lopencv_videoio \
-lopencv_imgproc \
-lopencv_video \
-lHyperFace \
-lopencv_imgcodecs \
-lhyperlpr3 \
-lopencv_objdetect \
-lsohuman \
# -lssl \
# -lcrypto \
-lc \
-lXNetSDK
#-lz
SOURCES += \
Common.cpp \
FaceReconition.cpp \
LogHandler.cpp \
main.cpp \
mainwindow.cpp \
LicensePlateRecognition.cpp \
MediaFaceImage.cpp \
RecognizedInfo.cpp \
Httpclient.cpp \
HttpService.cpp \
TaskRunnable.cpp \
CameraHandle.cpp \
ParkingSpaceInfo.cpp \
HumanDetection.cpp
HEADERS += \
Common.h \
FaceRecognition.h \
LogHandle.h \
mainwindow.h \
LicensePlateRecognition.h \
MediaFaceImage.h \
RecognitionInfo.h \
HttpClient.h \
HttpService.h \
VidesData.h \
TaskRunnable.h \
CameraHandle.h \
ParkingSpaceInfo.h \
HumanDetection.h
#FORMS += \
# mainwindow.ui
# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target
RESOURCES += \
BG.qrc
QT += core gui network multimedia sql concurrent
CONFIG += c++11 console
CONFIG -= app_bundle
TARGET = GAMERAVIDEONOUI
TEMPLATE = app
# The following define makes your compiler emit warnings if you use
# any Qt feature that has been marked deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0
unix:contains(QMAKE_HOST.arch, x86_64) {
QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
}
unix:contains(QMAKE_HOST.arch, arm) {
QMAKE_LIBDIR += /usr/local/lib
}
# 根据编译器类型选择库路径和头文件路径
unix: {
# x86 架构
contains(QMAKE_HOST.arch, x86_64) {
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
}
# ARM 架构
contains(QMAKE_HOST.arch, arm) {
INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface
INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK
INCLUDEPATH+=/usr/local/include/human
}
}
# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0
#INCLUDEPATH+=/usr/local/include/opencv4
#INCLUDEPATH+=/usr/local/include/hyperface
#INCLUDEPATH+=/usr/local/include/hyper
#QMAKE_LIBDIR += /usr/local/lib
#OPENCV_LIBS=-L /usr/local/lib -lopencv_core -lopencv_highgui -lopencv_videoio -lopencv_imgproc -lHyperFace -lopencv_imgcodecs
LIBS += -lopencv_core \
-lopencv_highgui \
-lopencv_videoio \
-lopencv_imgproc \
-lopencv_video \
-lHyperFace \
-lopencv_imgcodecs \
-lhyperlpr3 \
-lopencv_objdetect \
-lsohuman \
# -lssl \
# -lcrypto \
#-lc \
-lXNetSDK
#-lz
HEADERS += \
Common.h \
FaceRecognition.h \
LogHandle.h \
mainwindow.h \
LicensePlateRecognition.h \
MediaFaceImage.h \
RecognitionInfo.h \
HttpClient.h \
HttpService.h \
VidesData.h \
TaskRunnable.h \
CameraHandle.h \
ParkingSpaceInfo.h \
HumanDetection.h
SOURCES += \
Common.cpp \
FaceReconition.cpp \
LogHandler.cpp \
main.cpp \
mainwindow.cpp \
LicensePlateRecognition.cpp \
MediaFaceImage.cpp \
RecognizedInfo.cpp \
Httpclient.cpp \
HttpService.cpp \
TaskRunnable.cpp \
CameraHandle.cpp \
ParkingSpaceInfo.cpp \
HumanDetection.cpp
# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target
[devices]
rtps_urls=/home/mark/Public/build_gamera_videos/VID_20231122_145207.mp4
frame_counter=127
camera_logins=192.168.10.183:34567
username=admin
password=admin
sz_config_path=/home/mark/Public/build_gamera_videos/szConfigPath
sz_temp_path=/home/mark/Public/build_gamera_videos/szTempPath
[timer]
checkofflinetimer=10000000
delete_logfile_timer=86400000
delete_mkvflie_timer=100000
[faceFaceRecognitions]
images=/home/mark/Public/build_gamera_videos/images/lisi.jpg,/home/mark/Public/build_gamera_videos/images/guanyu.jpg,/home/mark/Public/build_gamera_videos/images/zhangfei.jpg
names=李四,关羽,张飞
[licensePlateRecognition]
model_paths=/home/mark/Public/build_gamera_videos/lprv3u_models/s
kun.jpg

212 KB

#include <QCoreApplication>
#include <opencv2/opencv.hpp>
#include "mainwindow.h"
// Headless entry point: set up the Qt core app, register metatypes and
// environment, then hand control to MainWindow's constructor + event loop.
int main(int argc, char *argv[])
{
QCoreApplication a(argc, argv);
// Make cv::Mat usable in queued signal/slot connections.
qRegisterMetaType<cv::Mat>("cv::Mat");
// Force FFmpeg's RTSP capture to UDP transport before any capture opens.
QString value = "rtsp_transport;udp";
qputenv("OPENCV_FFMPEG_CAPTURE_OPTIONS", value.toUtf8());
qputenv("QT_LOGGING_RULES", "qt.network.ssl=true");
// Non-UI controller: its constructor wires timers, the SDK and the TCP server.
MainWindow w;
return a.exec();
}
#include "mainwindow.h"
MainWindow* MainWindow::sp_this=nullptr;
// Headless controller: loads gameras.ini, wires the cleanup/sync timers,
// initializes face recognition and the camera SDK, and starts the local TCP
// image server.
MainWindow::MainWindow()
{
    sp_this=this;
    LogHandler::Get().installMessageHandler();
    // All runtime configuration comes from gameras.ini next to the binary.
    QString inifile=QCoreApplication::applicationDirPath()+"/gameras.ini";
    qSetting = new QSettings(inifile,QSettings::IniFormat);
    qSetting->setIniCodec(QTextCodec::codecForName("UTF-8"));
    modelPaths=qSetting->value("licensePlateRecognition/model_paths").toString();
    initVideoOutPath();
    // Periodic cleanup of old log files.
    deleteLogFileTimer =new QTimer(this);
    connect(deleteLogFileTimer, &QTimer::timeout, this, &MainWindow::deleteLogFile);
    int deleteLogfileTimer=qSetting->value("timer/delete_logfile_timer").toInt();
    deleteLogFileTimer->start(deleteLogfileTimer);
    // Periodic cleanup of recorded MKV files.
    deleteFrameFileTimer =new QTimer(this);
    int deMkvflieTimer=qSetting->value("timer/delete_mkvflie_timer").toInt();
    connect(deleteFrameFileTimer,&QTimer::timeout,this,&MainWindow::deleteMkvFileTimer);
    deleteFrameFileTimer->start(deMkvflieTimer);
    // Load the locally cached face images and seed the recognizer.
    initFaceFaceRecognition();
    FaceReconition &faceRecognition = FaceReconition::getInstance();
    float confidence=qSetting->value("devices/confidence").toFloat();
    if(localImageMap.size()>0){
        faceRecognition.initSourceImageMap(localImageMap,confidence);
    }
    float carConfidence=qSetting->value("devices/carConfidence").toFloat();
    //LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance();
    //licensePlateRecogn.initHlprContext(modelPaths,qSetting->value("licensePlateRecognition/car_cascade_path").toString(),carConfidence);
    // Select the cloud endpoint by profile (test vs production).
    QString httpurl;
    QString profile=qSetting->value("cloudservice/profile","test").toString();
    if(strcmp(profile.toUtf8().data(),vides_data::PROFLIE_TEST)==0){
        httpurl=qSetting->value("cloudservice/test_http").toString();
    }else{
        httpurl=qSetting->value("cloudservice/pro_http").toString();
    }
    // Bring up the camera SDK; nothing else works without it.
    MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
    QString configPath = qSetting->value("devices/sz_config_path").toString();
    QString tempPath = qSetting->value("devices/sz_temp_path").toString();
    int sdk_handle= mediaFaceImage->SdkInit(configPath,tempPath);
    qDebug()<<"句柄为:"<<sdk_handle;
    if(sdk_handle<0){
        qInfo() << "sdk初始化失败";
        return;
    }
    connect(this, SIGNAL(shutdownSignals(QString,int)), this, SLOT(clearHandle(QString,int)),Qt::QueuedConnection);
    // Periodically sync the device list / permissions and (re)start cameras.
    dePermissionSynTimer=new QTimer(this);
    int dePermissionTimer=qSetting->value("timer/device_permission_syn_timer").toInt();
    connect(dePermissionSynTimer, &QTimer::timeout, this, [this, httpurl](){
        this->startCamera(httpurl);
    },Qt::QueuedConnection);
    dePermissionSynTimer->start(dePermissionTimer);
    // Local TCP server that serves current camera frames.
    connect(&server, &QTcpServer::newConnection, this, &MainWindow::handleMatNewConnection);
    int port=qSetting->value("localservice/port").toInt();
    // Fix: report the port actually configured — the old messages hard-coded
    // "12345" regardless of the localservice/port setting.
    if (!server.listen(QHostAddress::Any, port)) {
        qInfo() << "Error: Unable to start the server on port" << port;
    } else {
        qDebug() << "Server started, listening on port" << port;
    }
}
// Serialize {code, data, msg} as JSON, push it out and close the connection.
void MainWindow::sendJsonResponse(QTcpSocket* socket, int code, const QString& data, const QString& msg){
    QJsonObject body;
    body["code"] = code;
    body["data"] = data;
    body["msg"] = msg;
    const QByteArray payload = QJsonDocument(body).toJson();
    socket->write(payload);
    socket->flush();
    socket->waitForBytesWritten();
    socket->disconnectFromHost();
}
// Success envelope with an empty payload — used when no camera matched.
void MainWindow::sendEmptyResponse(QTcpSocket* socket){
sendJsonResponse(socket, 200, "", "Success");
}
// Minimal raw HTTP 404 for URLs the image server does not recognize.
void MainWindow::sendNotFoundResponse(QTcpSocket* socket){
    static const char kNotFound[] =
        "HTTP/1.1 404 Not Found\r\n"
        "Content-Type: text/html\r\n"
        "Connection: Close\r\n\r\n"
        "<html><body><h1>404 Not Found</h1>"
        "<p>The requested URL was not found on this server.</p></body></html>";
    socket->write(kNotFound);
    socket->flush();
    socket->waitForBytesWritten();
    socket->disconnectFromHost();
}
void MainWindow::handleMatNewConnection(){
QTcpSocket* socket = server.nextPendingConnection();
QObject::connect(socket, &QTcpSocket::readyRead, [&, socket](){
QByteArray requestData = socket->readAll();
QString request(requestData);
QRegExp urlRegEx("GET /cameras/([^/]+)/current_image HTTP");
if(urlRegEx.indexIn(request) != -1) {
QString sn = urlRegEx.cap(1);
qDebug() << "Requested Camera SN: " << sn;
bool foundCamera = false;
for (auto it = faceDetectionParkingPushs.begin(); it != faceDetectionParkingPushs.end(); ++it) {
QString currentSn = it->second->getSSn();
if (currentSn == sn) {
CameraHandle* matchedHandle = it->second;
std::vector<uchar> buffer;
matchedHandle->getCurrentFrame(buffer);
QByteArray byteArray(reinterpret_cast<const char*>(buffer.data()), buffer.size());
QString base64Data = byteArray.toBase64();
sendJsonResponse(socket, 200, base64Data, "Success");
foundCamera = true;
break;
}
}
if(!foundCamera){
sendEmptyResponse(socket);
}
} else {
sendNotFoundResponse(socket);
}
});
}
// Download the OSS image registered for modId into the local image folder,
// persist the resulting path under the current QSettings group and update
// localImageMap. No-op when modId has no cloud-side entry.
// NOTE: callers are expected to have begun the "localImageMap" settings group.
void MainWindow::modifyImagesAndNames(QString &modId){
    QString fullPathName;
    Common & instace= Common::getInstance();
    if(cloudImageMap.count(modId) <=0 ){
        return;
    }
    QString ossUrl= cloudImageMap.at(modId);
    HttpService httpService(ossUrl);
    vides_data::response *res= httpService.httpDownload(instace.getImages(),fullPathName);
    if(res->code!=0){
        // Fix: the old message blamed httpFindCameras; the failing call here
        // is the image download.
        qInfo()<<"httpDownload请求失败";
        instace.deleteObj(res);
        return ;
    }
    QString settingKey = QString("%1").arg(modId);
    qSetting->setValue(settingKey,fullPathName);
    localImageMap[modId]=fullPathName;
    instace.deleteObj(res);
}
// Delete the local image file recorded for this id. The localImageMap entry
// itself is kept; callers refresh it via modifyImagesAndNames afterwards.
void MainWindow::removeImageFiles(QString id){
    auto localIt = localImageMap.find(id);
    if (localIt != localImageMap.end()) {
        const QString &path = localIt->second;
        QFile file(path);
        // Fix: the previous code opened the file WriteOnly (which truncates,
        // or even creates, the file) merely to probe it before removing.
        // Remove directly and log only a genuine failure.
        if (file.exists() && !file.remove()) {
            qDebug() << "removeImageFiles open fail" << path;
        }
    }
}
// Synchronise the local face-image cache with the platform:
//  1. fetch the cloud-side face list for this box's serial number;
//  2. download new/changed images and persist both id->url ("cloudImageMap")
//     and id->file ("localImageMap") in QSettings;
//  3. delete images whose url the cloud no longer lists;
//  4. when anything changed, re-feed the recognizer with the local files.
void MainWindow::updateLocalFace(const QString &httpurl) {
    Common &instance = Common::getInstance();
    HttpService httpService(httpurl);
    QString serialNumber;
    std::list<vides_data::responseFaceReconition*> datas;
    this->findLocalSerialNumber(serialNumber);
    vides_data::response *res = httpService.httpFindFaceReconition(serialNumber, datas);
    if (res->code != 0) {
        qInfo() << "httpFindCameras请求失败";
        instance.deleteObj (res); // release the response manually
        return;
    }
    // Every image url seen in this sync; map entries absent from this set
    // are considered stale and removed below.
    // NOTE(review): the responseFaceReconition* elements of `datas` are not
    // freed in this function — confirm ownership lies with HttpService.
    QSet<QString> dataIds;
    bool isChanged=false;
    for (const auto& item : datas) {
        dataIds.insert(item->img);
        if (cloudImageMap.count(item->id) > 0) {
            // Known id: refresh only when the OSS url changed.
            if (cloudImageMap.at(item->id) != item->img) {
                qSetting->beginGroup("cloudImageMap");
                QString settingKey = QString("%1").arg(item->id);
                qSetting->setValue(settingKey, item->img);
                qSetting->endGroup();
                cloudImageMap[item->id] = item->img;
                removeImageFiles(item->id);
                // modifyImagesAndNames writes into the currently-open group.
                qSetting->beginGroup("localImageMap");
                this->modifyImagesAndNames(item->id);
                qSetting->endGroup();
                isChanged=true;
            }
        } else {
            // New id: persist the url and download the image.
            qSetting->beginGroup("cloudImageMap");
            QString settingKey = QString("%1").arg(item->id);
            qSetting->setValue(settingKey, item->img);
            qSetting->endGroup();
            cloudImageMap[item->id] = item->img;
            qSetting->beginGroup("localImageMap");
            this->modifyImagesAndNames(item->id);
            qSetting->endGroup();
            isChanged=true;
        }
    }
    // Purge ids whose image url is no longer reported by the cloud.
    for (auto it = cloudImageMap.begin(); it != cloudImageMap.end();) {
        if (!dataIds.contains(it->second)) {
            qSetting->beginGroup("cloudImageMap");
            qSetting->remove(it->first);
            qSetting->endGroup();
            auto localIt = localImageMap.find(it->first);
            if (localIt != localImageMap.end()) {
                // NOTE(review): the file is opened WriteOnly only as an
                // accessibility probe before remove() — this truncates it;
                // the map/settings entry survives when open fails.
                QFile file(localIt->second);
                if (!file.open(QIODevice::WriteOnly)) {
                    qDebug() << "open fail" << localIt->second;
                } else {
                    file.remove();
                    qSetting->beginGroup("localImageMap");
                    qSetting->remove(localIt->first);
                    qSetting->endGroup();
                    localImageMap.erase(localIt);
                }
            }
            isChanged=true;
            it = cloudImageMap.erase(it);
        } else {
            ++it;
        }
    }
    FaceReconition &faceRecognition = FaceReconition::getInstance();
    if (isChanged) {
        if (cloudImageMap.empty()) {
            // Cloud mapping is now empty: drop all loaded features.
            faceRecognition.featureRemove();
        } else {
            float confidence=qSetting->value("devices/confidence").toFloat();
            qDebug()<<"startMap != endMap-->";
            faceRecognition.initSourceImageMap(localImageMap,confidence);
        }
    }
    instance.deleteObj(res);
}
// Resolve this box's identifier: virtual machines have no usable CPU serial,
// so they fall back to the OS machine-unique id; real hardware reads the CPU
// serial number. serialNumber stays empty when nothing could be determined.
void MainWindow::findLocalSerialNumber(QString &serialNumber){
    if (vides_data::isVirtualMachine()) {
        serialNumber = QSysInfo::machineUniqueId();
        return;
    }
    serialNumber = vides_data::getSerialNumber();
    if (serialNumber.isEmpty()) {
        qDebug() << "CPU Serial Number not found!";
    } else {
        qDebug() << "CPU Serial Number:" << serialNumber;
    }
}
// Tear down the CameraHandle registered under "<ip>:<port>": delete the
// handle, drop it from the registry and clear the SDK-side device mapping.
// Runs on the main thread (queued slot of shutdownSignals).
void MainWindow::clearHandle(QString sDevId, int nDevPort){
    const QString key = sDevId + ":" + QString::number(nDevPort);
    Common &instace = Common::getInstance();
    auto entry = this->faceDetectionParkingPushs.find(key);
    if (entry == this->faceDetectionParkingPushs.end()) {
        return;
    }
    qDebug()<<"clearHandle:离线的设备是:"<<key;
    CameraHandle *offlineHandle = entry->second;
    const int hDevice = offlineHandle->getHdevice();
    instace.deleteObj(offlineHandle);
    this->faceDetectionParkingPushs.erase(entry);
    // Remove the SDK mapping only after the handle itself is gone.
    MediaFaceImage::getInstance()->clearCurrentDevice(hDevice);
}
// Request teardown of a camera handle from any thread: the actual work runs
// in clearHandle, which is connected to this signal with Qt::QueuedConnection.
void MainWindow::clearOfflineCameraHandle(QString sDevId, int nDevPort) {
    emit shutdownSignals(sDevId,nDevPort);
}
// Camera listed by the platform but missing on the box  -> start it.
// Camera present on the box but dropped by the platform -> shut it down.
//
// Periodic sync (driven by dePermissionSynTimer): fetch the platform's camera
// list for this box, discover devices on the LAN, create handles for newly
// matched cameras, refresh parking areas on existing ones, retire handles the
// platform no longer lists, then report the box's own status upstream.
void MainWindow::startCamera(const QString &httpurl){
    Common & instace= Common::getInstance();
    MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
    // NOTE(review): std::remove_const<QString>::type is just QString — this is
    // an ordinary copy of httpurl.
    QString nonConstHttpUrl = std::remove_const<QString>::type(httpurl);
    vides_data::responseDeviceData devices;
    // QString serialNumber = QSysInfo::machineUniqueId();
    QString serialNumber;
    findLocalSerialNumber(serialNumber);
    HttpService httpService(httpurl);
    vides_data::response *re= httpService.httpFindCameras(serialNumber,devices);
    if(re->code==0){
        if(devices.list.size()<=0){
            instace.deleteObj(re);
            return;
        }
        QString username = qSetting->value("devices/username").toString();
        QString password = qSetting->value("devices/password").toString();
        // Discover devices reachable on the local network (sn -> status).
        std::map<QString,vides_data::localDeviceStatus*> localDevices;
        mediaFaceImage->SdkSearchDevicesSyn(localDevices);
        if(localDevices.size()<=0){
            instace.deleteObj(re);
            return ;
        }
        // Match each platform camera against a locally discovered device.
        for (const auto& device : devices.list) {
            if(localDevices.count(device.sSn)>0 ){
                vides_data::localDeviceStatus* localDevice= localDevices.at(device.sSn);
                QString ipAddress= QString("%1.%2.%3.%4").arg(localDevice->HostIP.c[0]).arg(localDevice->HostIP.c[1]).arg(localDevice->HostIP.c[2]).arg(localDevice->HostIP.c[3]);
                //this->gatewayRandomIp(ipAddress);
                // Registry key matches the one used by clearHandle.
                QString key = ipAddress + ":" + QString::number(localDevice->TCPPort);
                if(faceDetectionParkingPushs.count(key)<=0){
                    // New camera: build login parameters and create a handle.
                    httpService.setHttpUrl(httpurl);
                    vides_data::cameraParameters parameter;
                    parameter.sDevId=ipAddress;
                    parameter.nDevPort=localDevice->TCPPort;
                    parameter.sUserName=username;
                    parameter.sPassword=password;
                    parameter.channel=localDevice->ChannelNum;
                    parameter.httpUrl=nonConstHttpUrl;
                    parameter.sSn=device.sSn;
                    //parameter.rtspUrl="rtsp://192.168.10.131:554/user=admin&password=&channel=1&stream=1.sdp?";
                    //parameter.rtspUrl=std::move(QString("rtsp://admin:@%1/stream1").arg(ipAddress));
                    this->initCameras(parameter,device.areas);
                }
                else {
                    // Known camera: refresh parking areas only when changed.
                    auto it = this->faceDetectionParkingPushs.find(key);
                    if (it != this->faceDetectionParkingPushs.end()) {
                        CameraHandle* offlineCameraHandle = it->second; // ->second holds the handle
                        if(!offlineCameraHandle->compareLists(device.areas)){
                            offlineCameraHandle->updateParkMapAndParkingSpaceInfos(device.areas);
                        }
                    }
                }
            }
        }
        // Retire local devices the platform no longer lists.
        this->deleteCloudNotCamer(localDevices, devices.list);
        for (auto& pair : localDevices) {
            if (pair.second != nullptr) { // still owned (not deleted yet)
                instace.deleteObj(pair.second);
            }
        }
        // Drop the (now dangling) pointers.
        localDevices.clear();
    }
    // Report this box's own status regardless of the camera-sync outcome.
    vides_data::requestDeviceStatus reStatus;
    reStatus.sSn=serialNumber;
    reStatus.status=1;
    reStatus.type=1;
    reStatus.ip_addr=instace.GetLocalIp();
    qDebug()<<"local.ip_addr===>"<<reStatus.ip_addr;
    httpService.setHttpUrl(httpurl);
    qDebug()<<"httpurl===>"<<httpurl;
    qDebug()<<"serialNumber===>"<<serialNumber;
    vides_data::response *res=httpService.httpPostDeviceStatus(reStatus);
    if(res->code!=0){
        qInfo()<<"盒子状态上报失败 code:"<<res->code<<"msg:"<<res->data;
    }
    // Keep the face-image cache in step with the platform on every sync tick.
    updateLocalFace(httpurl);
    instace.deleteObj(re);
    instace.deleteObj(res);
}
bool MainWindow::isDeviceInList(const QString& deviceId, const std::list<vides_data::responseDeviceStatus>& devices) {
auto it = std::find_if(devices.begin(), devices.end(), [&deviceId](const vides_data::responseDeviceStatus& device) {
return deviceId == device.sSn; // 假设 sSn 是 std::string 类型,需要转换
});
return it != devices.end(); // 如果迭代器不是end,说明找到了匹配项
}
// Request shutdown of every locally discovered device that the cloud no
// longer lists — the platform is the source of truth for which cameras run.
void MainWindow::deleteCloudNotCamer(const std::map<QString, vides_data::localDeviceStatus*>& localDevices,
                                     const std::list<vides_data::responseDeviceStatus>& devices) {
    // Fix: iterate by const reference (the old loop copied each map pair)
    // and drop the "key" string that was built per iteration but never used.
    for (const auto &localDevice : localDevices) {
        vides_data::localDeviceStatus *value = localDevice.second;
        if (!this->isDeviceInList(value->sSn, devices)) {
            QString ipAddress = QString("%1.%2.%3.%4").arg(value->HostIP.c[0]).arg(value->HostIP.c[1]).arg(value->HostIP.c[2]).arg(value->HostIP.c[3]);
            clearOfflineCameraHandle(ipAddress, value->TCPPort);
        }
    }
}
// Push the initial configuration to a freshly logged-in device:
// time-zone sync, recording settings and encode settings (both as UTF-8 JSON).
void MainWindow::initDevConfigSyn(CameraHandle *cameraHandle){
    Common &instace = Common::getInstance();
    QString time = instace.getTimeString();
    cameraHandle->sdkDevSystemTimeZoneSyn(time);

    QString recordingJson;
    iniRecordingToString(recordingJson);
    QByteArray recordingUtf8 = recordingJson.toUtf8();
    cameraHandle->sdkRecordCfg(recordingUtf8.data());

    QString encodeJson;
    iniEncodeToString(encodeJson);
    QByteArray encodeUtf8 = encodeJson.toUtf8();
    cameraHandle->sdkEncodeCfg(encodeUtf8.data());
}
// Build the encode-configuration JSON (ExtraFormat + MainFormat) from the
// ini settings and return it serialized into enCodeJson. The device SDK
// expects a one-element array wrapping the configuration object.
void MainWindow::iniEncodeToString(QString &enCodeJson) {
    // Both stream sections share exactly the same layout, so build each from
    // its settings prefix instead of duplicating the literal block twice.
    // (QJsonObject serializes with sorted keys, so the output is unchanged.)
    auto buildFormat = [this](const QString &prefix) -> QJsonObject {
        QJsonObject video{
            // {"BitRate", qSetting->value(prefix + "/Video.BitRate").toInt()},
            {"BitRateControl", qSetting->value(prefix + "/Video.BitRateControl").toString()},
            {"Compression", qSetting->value(prefix + "/Video.Compression").toString()},
            {"FPS", qSetting->value(prefix + "/Video.FPS").toInt()},
            {"GOP", qSetting->value(prefix + "/Video.GOP").toInt()},
            {"Quality", qSetting->value(prefix + "/Video.Quality").toInt()},
            {"Resolution", qSetting->value(prefix + "/Video.Resolution").toString()},
            {"VirtualGOP", qSetting->value(prefix + "/Video.VirtualGOP").toInt()}
        };
        QJsonObject format;
        format["VideoEnable"] = qSetting->value(prefix + "/VideoEnable").toBool();
        format["AudioEnable"] = qSetting->value(prefix + "/AudioEnable").toBool();
        format["Video"] = video;
        return format;
    };
    QJsonObject rootObject;
    rootObject["ExtraFormat"] = buildFormat("ExtraFormat");
    rootObject["MainFormat"] = buildFormat("MainFormat");
    QJsonArray jsonArray;
    jsonArray.append(rootObject);
    QJsonDocument jsonDocument(jsonArray);
    enCodeJson = QString::fromUtf8(jsonDocument.toJson());
}
// Compare the GB28181 settings persisted for camera `sn` (camera_config.json
// next to the executable) with the platform-provided values in `gb28181`.
// Returns true when they already match (or the config file cannot be read);
// returns false after updating the file, in which case `workSpWMn` receives
// the compact JSON payload that must be pushed to the device (sdkDevSpvMn).
bool MainWindow::iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &workSpWMn,QString &sn){
    qDebug()<<"iniWorkSpVMn=="<<sn;
    QString jsonfile=QCoreApplication::applicationDirPath()+"/camera_config.json";
    bool isEqual=true;
    // Read the JSON configuration file.
    QFile file(jsonfile);
    if (!file.open(QIODevice::ReadOnly)) {
        qDebug() << "Failed to open the camera_config.json";
        return isEqual;
    }
    // Flat object sent to the device; populated only when settings differ.
    QJsonObject toJsonObject;
    // Parse the JSON data.
    QByteArray jsonData = file.readAll();
    file.close();
    QJsonDocument jsonDoc = QJsonDocument::fromJson(jsonData);
    QJsonObject rootObj = jsonDoc.object();
    // Fetch the cameraconfigs array (one entry per camera sn).
    QJsonArray cameraConfigs = rootObj.value("cameraconfigs").toArray();
    bool found = false;
    for (int i = 0; i < cameraConfigs.size(); ++i) {
        QJsonObject config = cameraConfigs.at(i).toObject();
        if (config.contains("sn") && config.value("sn").toString() == sn) {
            found = true;
            // Stored values for this camera.
            QString Camreaid = config.value("Camreaid").toString();
            int iHsIntervalTime = config.value("iHsIntervalTime").toInt();
            int iRsAgedTime = config.value("iRsAgedTime").toInt();
            int sCsPort = config.value("sCsPort").toInt();
            QString szConnPass = config.value("szConnPass").toString();
            QString szCsIP = config.value("szCsIP").toString();
            QString szDeviceNO = config.value("szDeviceNO").toString();
            QString szServerDn = config.value("szServerDn").toString();
            QString szServerNo = config.value("szServerNo").toString();
            // Field-by-field comparison against the platform values.
            isEqual = (szCsIP == gb28181->sip_ip &&
                       sCsPort == gb28181->sip_port &&
                       szServerNo == gb28181->serial &&
                       szServerDn ==gb28181->realm &&
                       iRsAgedTime == gb28181->register_validity &&
                       iHsIntervalTime == gb28181->heartbeat_interval &&
                       szConnPass == gb28181->password &&
                       szDeviceNO == gb28181->device_id &&
                       Camreaid == gb28181->channel_id);
            if(!isEqual){
                // Refresh the persisted entry...
                config["Camreaid"]=gb28181->channel_id;
                config["szCsIP"]=gb28181->sip_ip ;
                config["szServerNo"]=gb28181->serial;
                config["sCsPort"]=gb28181->sip_port;
                config["szServerDn"]=gb28181->realm;
                config["iRsAgedTime"]=gb28181->register_validity;
                config["iHsIntervalTime"]=gb28181->heartbeat_interval;
                config["szConnPass"]=gb28181->password;
                config["szDeviceNO"]=gb28181->device_id;
                config["sn"]=sn;
                // ...and build the device-bound payload.
                toJsonObject["szCsIP"]=gb28181->sip_ip ;
                toJsonObject["szServerNo"]=gb28181->serial;
                toJsonObject["sCsPort"]=gb28181->sip_port;
                toJsonObject["szServerDn"]=gb28181->realm;
                toJsonObject["iRsAgedTime"]=gb28181->register_validity;
                toJsonObject["iHsIntervalTime"]=gb28181->heartbeat_interval;
                toJsonObject["szConnPass"]=gb28181->password;
                toJsonObject["szDeviceNO"]=gb28181->device_id;
                // 64 alarm levels, all zero.
                QVariantList variantList;
                for (int i = 0; i < 64; ++i) {
                    variantList.append(QVariant(0));
                }
                QJsonArray levelArray = QJsonArray::fromVariantList(variantList);
                toJsonObject["AlarmLevel"]=levelArray;
                // 64 alarm channel ids with a fixed GB28181-style prefix.
                QStringList alarmidStrings;
                for (int i = 1; i <= 64; ++i) {
                    alarmidStrings.append("3402000000134000000" + QString::number(i, 10).rightJustified(2, '0'));
                }
                QJsonArray alarmidArray = QJsonArray::fromStringList(alarmidStrings);
                toJsonObject["Alarmid"]=alarmidArray;
                // 64 camera levels, all zero.
                QVariantList variantListLevel;
                for (int i = 0; i < 64; ++i) {
                    variantListLevel.append(QVariant(0));
                }
                QJsonArray camreaLevelArray = QJsonArray::fromVariantList(variantListLevel);
                toJsonObject["CamreaLevel"]=camreaLevelArray;
                // Camera ids: slot 1 is the platform channel id, the rest are
                // placeholders with a fixed prefix.
                QStringList camreaidStrings;
                for (int i = 1; i <= 64; ++i) {
                    if(i==1){
                        camreaidStrings.append(gb28181->channel_id);
                    }else{
                        camreaidStrings.append("3402000000131000001" + QString::number(i, 10).rightJustified(2, '0'));
                    }
                }
                QJsonArray camreaidArray = QJsonArray::fromStringList(camreaidStrings);
                toJsonObject["Camreaid"]=camreaidArray;
                // Fixed fields: 4294967295 = 0xFFFFFFFF, i.e. all 32 alarm
                // bits enabled; 5060 is the standard SIP port.
                toJsonObject["HeartBeatCount"] = 0;
                toJsonObject["bCsEnable"] = true;
                toJsonObject["uiAlarmStateBlindEnable"] = static_cast<qint64>(4294967295);
                toJsonObject["uiAlarmStateConnectEnable"] = static_cast<qint64>(4294967295);
                toJsonObject["uiAlarmStateGpinEnable"] = static_cast<qint64>(4294967295);
                toJsonObject["uiAlarmStateLoseEnable"] = static_cast<qint64>(4294967295);
                toJsonObject["uiAlarmStateMotionEnable"] = static_cast<qint64>(4294967295);
                toJsonObject["uiAlarmStatePerformanceEnable"] = static_cast<qint64>(4294967295);
                toJsonObject["sUdpPort"] = 5060;
                cameraConfigs[i] = config; // write the updated entry back into the array
                break;
            }
        }
    }
    if (!found) {
        // No entry for this sn yet: create one and build the same payload.
        QJsonObject newValue;
        newValue["sn"]=sn;
        newValue["Camreaid"]=gb28181->channel_id;
        newValue["szCsIP"]=gb28181->sip_ip ;
        newValue["szServerNo"]=gb28181->serial;
        newValue["sCsPort"]=gb28181->sip_port;
        newValue["szServerDn"]=gb28181->realm;
        newValue["iRsAgedTime"]=gb28181->register_validity;
        newValue["iHsIntervalTime"]=gb28181->heartbeat_interval;
        newValue["szConnPass"]=gb28181->password;
        newValue["szDeviceNO"]=gb28181->device_id;
        toJsonObject["szCsIP"]=gb28181->sip_ip ;
        toJsonObject["szServerNo"]=gb28181->serial;
        toJsonObject["sCsPort"]=gb28181->sip_port;
        toJsonObject["szServerDn"]=gb28181->realm;
        toJsonObject["iRsAgedTime"]=gb28181->register_validity;
        toJsonObject["iHsIntervalTime"]=gb28181->heartbeat_interval;
        toJsonObject["szConnPass"]=gb28181->password;
        toJsonObject["szDeviceNO"]=gb28181->device_id;
        QVariantList variantList;
        for (int i = 0; i < 64; ++i) {
            variantList.append(0);
        }
        QJsonArray levelArray = QJsonArray::fromVariantList(variantList);
        toJsonObject["AlarmLevel"]=levelArray;
        QStringList alarmidStrings;
        for (int i = 1; i <= 64; ++i) {
            alarmidStrings.append("3402000000134000000" + QString::number(i, 10).rightJustified(2, '0'));
        }
        QJsonArray alarmidArray = QJsonArray::fromStringList(alarmidStrings);
        toJsonObject["Alarmid"]=alarmidArray;
        QJsonArray camreaLevelArray;
        for (int i = 0; i < 64; ++i) {
            camreaLevelArray.append(0);
        }
        toJsonObject["CamreaLevel"]=camreaLevelArray;
        QStringList camreaidStrings;
        for (int i = 1; i <= 64; ++i) {
            if(i==1){
                camreaidStrings.append(gb28181->channel_id);
            }else{
                camreaidStrings.append("3402000000131000001" + QString::number(i, 10).rightJustified(2, '0'));
            }
        }
        QJsonArray camreaidArray = QJsonArray::fromStringList(camreaidStrings);
        toJsonObject["Camreaid"]=camreaidArray;
        toJsonObject["HeartBeatCount"] = 0;
        toJsonObject["bCsEnable"] = true;
        toJsonObject["uiAlarmStateBlindEnable"] = static_cast<qint64>(4294967295);
        toJsonObject["uiAlarmStateConnectEnable"] = static_cast<qint64>(4294967295);
        toJsonObject["uiAlarmStateGpinEnable"] = static_cast<qint64>(4294967295);
        toJsonObject["uiAlarmStateLoseEnable"] = static_cast<qint64>(4294967295);
        toJsonObject["uiAlarmStateMotionEnable"] = static_cast<qint64>(4294967295);
        toJsonObject["uiAlarmStatePerformanceEnable"] = static_cast<qint64>(4294967295);
        toJsonObject["sUdpPort"] = 5060;
        isEqual=false;
        cameraConfigs.append(newValue); // add the new entry to the array
    }
    if(!isEqual){
        // Hand the compact payload to the caller...
        QJsonDocument doc(toJsonObject);
        QByteArray jsonData = doc.toJson(QJsonDocument::Compact);
        workSpWMn = QString::fromUtf8(jsonData);
        // ...and persist the updated JSON data.
        rootObj["cameraconfigs"] = cameraConfigs;
        file.setFileName(jsonfile);
        if (!file.open(QIODevice::WriteOnly)) {
            qWarning("Cannot open file for writing");
            return true;
        }
        QJsonDocument saveDoc(rootObj);
        file.write(saveDoc.toJson());
        file.close();
    }
    return isEqual;
}
// Assemble the recording-configuration JSON expected by the device SDK:
// seven "Mask" rows, the Packet fields and seven "TimeSection" rows, all
// wrapped in a one-element array. (QJsonObject serializes with sorted keys,
// so insertion order does not affect the output.)
void MainWindow::iniRecordingToString(QString &recorJson){
    // Both Mask_1..7 and TimeSection_1..7 are comma-separated lists in the
    // ini file; read each into a trimmed JSON row.
    auto readRows = [this](const QString &keyPattern) -> QJsonArray {
        QJsonArray rows;
        for (int idx = 1; idx <= 7; ++idx) {
            const QStringList values = qSetting->value(keyPattern.arg(idx)).toStringList();
            QJsonArray row;
            for (const QString &value : values) {
                row.append(value.trimmed());
            }
            rows.append(row);
        }
        return rows;
    };
    QJsonObject jsonObject;
    jsonObject["Mask"] = readRows(QStringLiteral("Mask/Mask_%1"));
    jsonObject["PacketLength"] = qSetting->value("Packet/PacketLength").toInt();
    jsonObject["PreRecord"] = qSetting->value("Packet/PreRecord").toInt();
    jsonObject["RecordMode"] = qSetting->value("Packet/RecordMode").toString();
    jsonObject["Redundancy"] = qSetting->value("Packet/Redundancy").toBool();
    jsonObject["TimeSection"] = readRows(QStringLiteral("TimeSection/TimeSection_%1"));
    QJsonArray jsonArray;
    jsonArray.append(jsonObject);
    QJsonDocument jsonDocument(jsonArray);
    recorJson = QString::fromUtf8(jsonDocument.toJson());
}
// Bring a newly discovered camera online: log in through the SDK, push its
// initial configuration, register the handle under "<ip>:<port>", then fetch
// and (if changed) push its GB28181 settings.
void MainWindow::initCameras(vides_data::cameraParameters &parameter,const std::list<vides_data::responseArea>&areas){
    MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
    Common & instace= Common::getInstance();
    float carConfidence=qSetting->value("devices/carConfidence").toFloat();
    int image_save=qSetting->value("devices/image_save").toInt();
    CameraHandle * cameraHandle =new CameraHandle(parameter.sDevId,parameter.httpUrl,parameter.sSn,parameter.channel,modelPaths,carConfidence,image_save);
    int sdk_handle=cameraHandle->sdkDevLoginSyn(parameter.sDevId,parameter.nDevPort,parameter.sUserName,parameter.sPassword,10000);
    qDebug()<<"句柄为2:"<<sdk_handle;
    if(sdk_handle<=0){
        qInfo() << "登录失败";
        // Fix: the freshly allocated handle was leaked on login failure.
        instace.deleteObj(cameraHandle);
        return ;
    }
    initDevConfigSyn(cameraHandle);
    mediaFaceImage->setMap(sdk_handle,cameraHandle);
    cameraHandle->sdkDevSetAlarmListener(sdk_handle,1);
    int synTime=qSetting->value("timer/dev_snap_syn_timer").toInt();
    cameraHandle->initSdkRealTimeDevSnapSyn(sdk_handle,synTime);
    int seTime=qSetting->value("timer/semaphore_time").toInt();
    cameraHandle->setTimeoutMs(seTime);
    cameraHandle->initParkingSpaceInfo(areas);
    // Same key shape as clearHandle/startCamera use for lookups.
    QString key =parameter.sDevId + ":" + QString::number(parameter.nDevPort);
    faceDetectionParkingPushs[key]= cameraHandle;
    HttpService httpService(parameter.httpUrl);
    vides_data::response *res=httpService.httpFindGb28181Config(parameter.sSn);
    if(res->code!=0){
        qInfo()<<"请求摄像头gb28181配置失败";
        instace.deleteObj(res);
        return;
    }
    vides_data::responseGb28181 *gb281 = reinterpret_cast<vides_data::responseGb28181*>(res->data);
    QString stGb281;
    // iniWorkSpVMn returns false when the device needs the new settings.
    bool re= iniWorkSpVMn(gb281,stGb281,parameter.sSn);
    if(!re){
        QByteArray bGb =stGb281.toUtf8();
        const char* cGb=bGb.data();
        cameraHandle->sdkDevSpvMn(cGb);
    }
    // NOTE(review): gb281 aliases res->data — confirm deleteObj(res) does not
    // also free the payload (double free risk otherwise).
    instace.deleteObj(gb281);
    instace.deleteObj(res);
}
// Route a directory path to the matching Common setter:
// 0x00 -> video output, 0x01 -> video download, 0x02 -> images.
// Unknown flags are ignored on purpose.
void MainWindow::setVideoPath(int flag, const QString& path) {
    Common& instance = Common::getInstance();
    if (flag == 0x00) {
        instance.setVideoOut(path);
    } else if (flag == 0x01) {
        instance.setVideoDownload(path);
    } else if (flag == 0x02) {
        instance.setImages(path);
    }
}
// Ensure dirName exists (relative to the current working directory) and,
// on success, register its absolute path under the given flag.
void MainWindow::createDirectory(int flag,const QString& dirName, const QString& successMsg, const QString& failureMsg) {
    QDir directory;
    const bool alreadyThere = directory.exists(dirName);
    if (!alreadyThere && !directory.mkdir(dirName)) {
        qDebug() << failureMsg << "目录创建失败";
        return;
    }
    qDebug() << successMsg << (alreadyThere ? "目录已存在" : "目录创建成功");
    setVideoPath(flag, directory.absoluteFilePath(dirName));
}
// Create the box's working directories and register their absolute paths.
// NOTE(review): flag 0x01 (video download) is paired with "frame_images" and
// 0x00 (video out) with "frame_video" — confirm this pairing is intended.
void MainWindow::initVideoOutPath(){
    createDirectory(0x01,"frame_images", "目录创建成功", "目录创建失败");
    createDirectory(0x00,"frame_video", "创建视频目录成功", "视频目录创建失败");
    createDirectory(0x02,"images", "图片目录创建成功", "图片目录创建失败");
}
// Release timers, settings and every registered camera handle before
// detaching the message handler.
MainWindow::~MainWindow()
{
    //delete ui;
    Common &instace = Common::getInstance();
    instace.deleteObj(qSetting);
    instace.deleteObj(deleteLogFileTimer);
    instace.deleteObj(deleteFrameFileTimer);
    instace.deleteObj(dePermissionSynTimer);
    for (auto &entry : faceDetectionParkingPushs) {
        instace.deleteObj(entry.second);
    }
    // Drop the now-dangling pointers from the registry.
    faceDetectionParkingPushs.clear();
    LogHandler::Get().uninstallMessageHandler();
}
void MainWindow::deleteMkvFileTimer(){
Common& instance = Common::getInstance();
QDir dir(instance.getVideoOut());
QStringList filters;
filters << "*.mp4" << "*.avi" << "*.jpg" << "*.mkv"; // 根据需要添加其他视频格式
dir.setNameFilters(filters);
QFileInfoList fileList = dir.entryInfoList(QDir::Files | QDir::NoDotAndDotDot);
foreach (QFileInfo fileInfo, fileList) {
QDateTime createTime = fileInfo.metadataChangeTime();
QDateTime now = QDateTime::currentDateTime();
if (createTime.secsTo(now) > 24 * 3600) { // 超过72小时
if (!QFile::remove(fileInfo.absoluteFilePath())) {
qInfo() << "Failed to delete file:" << fileInfo.fileName();
}
}
}
}
void MainWindow::deleteLogFile(){
QDateTime now = QDateTime::currentDateTime();
QDir logDir("log");
// 前3天
QDateTime dateTime1 = now.addDays(-3);
QDateTime dateTime2;
QString logPath = logDir.absoluteFilePath(""); // 日志的路径
QDir dir(logPath);
QStringList filename ;
filename << "*.log";//可叠加,可使用通配符筛选
QFileInfoList fileList = dir.entryInfoList(filename);
foreach (QFileInfo f, fileList) {
// "."和".."跳过
if (f.baseName() == "" || f.baseName()=="today" )
continue;
dateTime2 = QDateTime::fromString(f.baseName(), "yyyy-MM-dd");
if (dateTime2 < dateTime1) { // 只要日志时间小于前3天的时间就删除
dir.remove(f.absoluteFilePath());
}
}
}
void MainWindow::initFaceFaceRecognition() {
qSetting->beginGroup("cloudImageMap");
QStringList keys = qSetting->childKeys();
foreach(QString key, keys) {
QString value = qSetting->value(key).toString();
cloudImageMap[key]=value;
}
qSetting->endGroup();
qSetting->beginGroup("localImageMap");
QStringList lokeys = qSetting->childKeys();
foreach(QString lk, lokeys) {
// 获取键对应的值
QString value = qSetting->value(lk).toString();
localImageMap[lk]=value;
}
qSetting->endGroup();
}
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include "Common.h"
#include "FaceRecognition.h"
#include "LicensePlateRecognition.h"
#include "hyper_lpr_sdk.h"
#include "CameraHandle.h"
#include "HttpService.h"
#include "VidesData.h"
#include "MediaFaceImage.h"
#include "HumanDetection.h"
#include <algorithm>
#include <QString>
#include <QTextCodec>
#include <QObject>
#include <QByteArray>
#include <QSettings>
#include <QTimer>
#include <QSemaphore>
#include <QDebug>
#include <QResource>
#include <opencv2/opencv.hpp>
#include <QRandomGenerator>
#include <QTcpServer>
#include <QTcpSocket>
//namespace Ui { class MainWindow; }
// Headless controller for the box: discovers cameras on the LAN, keeps them
// in sync with the platform's device/face lists, serves current frames over
// a small TCP endpoint and pushes device configuration via the vendor SDK.
// (Not a QMainWindow — the UI is disabled; it only needs QObject for
// signals/slots.)
class MainWindow : public QObject
{
    Q_OBJECT
public:
    explicit MainWindow();
    // Directory bootstrap: create working dirs and register their paths.
    void initVideoOutPath();
    void setVideoPath(int flag, const QString& path);
    void createDirectory(int flag,const QString& dirName, const QString& successMsg, const QString& failureMsg);
    // Restore persisted face-image maps from QSettings.
    void initFaceFaceRecognition();
    // Log in and configure one camera, then register its handle.
    void initCameras(vides_data::cameraParameters &parameter,const std::list<vides_data::responseArea>&areas);
    static MainWindow * sp_this;
    // TCP endpoint helpers (raw-JSON success envelope / HTTP 404).
    void sendJsonResponse(QTcpSocket* socket, int code, const QString& data, const QString& msg);
    void sendEmptyResponse(QTcpSocket* socket);
    void sendNotFoundResponse(QTcpSocket* socket);
    // Face-image cache synchronisation with the platform.
    void updateLocalFace(const QString &httpurl);
    void removeImageFiles(QString id);
    void modifyImagesAndNames(QString &modId);
    // Box identity (CPU serial, or machine id on virtual machines).
    void findLocalSerialNumber(QString &serialNumber);
    // Initial device configuration (time zone, recording, encoding).
    void initDevConfigSyn(CameraHandle *cameraHandle);
    void iniRecordingToString(QString &recorJson);
    void iniEncodeToString(QString &enCodeJson);
    // Thread-safe teardown request; actual work runs in clearHandle.
    void clearOfflineCameraHandle(QString sDevId, int nDevPort);
    // GB28181 settings diff/update; see the .cpp for the return contract.
    bool iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &workSpWMn,QString &sn);
    bool isDeviceInList(const QString& deviceId, const std::list<vides_data::responseDeviceStatus>& devices);
    // Filter: shut down local devices absent from the cloud list.
    void deleteCloudNotCamer (const std::map<QString,vides_data::localDeviceStatus*>& localDevices,
                              const std::list<vides_data::responseDeviceStatus>& devices);
    ~MainWindow();
signals:
    void shutdownSignals(QString sDevId, int nDevPort);
private slots:
    // Periodic platform sync (queued on dePermissionSynTimer).
    void startCamera(const QString &httpurl);
    void deleteLogFile();
    void clearHandle(QString sDevId, int nDevPort);
    void deleteMkvFileTimer();
    void handleMatNewConnection();
private:
    //Ui::MainWindow *ui;
    QSettings *qSetting;
    QTimer *deleteLogFileTimer;
    QTimer *deleteFrameFileTimer;
    QTimer*dePermissionSynTimer;
    QTcpServer server;
    // local id -> image file path
    std::map<QString,QString>localImageMap;
    // cloud id -> OSS url
    std::map<QString,QString>cloudImageMap;
    QString modelPaths;
    // "<ip>:<port>" -> camera handle registry
    std::map<QString,CameraHandle*>faceDetectionParkingPushs;
};
#endif // MAINWINDOW_H
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>MainWindow</class>
<widget class="QMainWindow" name="MainWindow">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>987</width>
<height>600</height>
</rect>
</property>
<property name="windowTitle">
<string>MainWindow</string>
</property>
<widget class="QWidget" name="centralwidget">
<widget class="QWidget" name="layoutWidget">
<property name="geometry">
<rect>
<x>170</x>
<y>120</y>
<width>610</width>
<height>301</height>
</rect>
</property>
<layout class="QVBoxLayout" name="verticalLayout">
<item>
<layout class="QHBoxLayout" name="horizontalLayout">
<item>
<widget class="QLabel" name="label">
<property name="text">
<string>设备名称:</string>
</property>
</widget>
</item>
<item>
<widget class="QComboBox" name="device">
<property name="currentText">
<string/>
</property>
</widget>
</item>
</layout>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_5">
<item>
<widget class="QLabel" name="label_2">
<property name="text">
<string>起始时间:</string>
</property>
</widget>
</item>
<item>
<widget class="QDateTimeEdit" name="startTimer">
<property name="date">
<date>
<year>2024</year>
<month>1</month>
<day>17</day>
</date>
</property>
<property name="time">
<time>
<hour>0</hour>
<minute>0</minute>
<second>0</second>
</time>
</property>
<property name="maximumTime">
<time>
<hour>23</hour>
<minute>59</minute>
<second>59</second>
</time>
</property>
<property name="displayFormat">
<string>yyyy-MM-dd hh:mm:ss</string>
</property>
</widget>
</item>
<item>
<widget class="QLabel" name="label_3">
<property name="text">
<string>截止时间:</string>
</property>
</widget>
</item>
<item>
<widget class="QDateTimeEdit" name="endTimer">
<property name="dateTime">
<datetime>
<hour>10</hour>
<minute>42</minute>
<second>31</second>
<year>2024</year>
<month>1</month>
<day>17</day>
</datetime>
</property>
<property name="date">
<date>
<year>2024</year>
<month>1</month>
<day>17</day>
</date>
</property>
<property name="displayFormat">
<string>yyyy-MM-dd hh:mm:ss</string>
</property>
</widget>
</item>
</layout>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_7">
<item>
<widget class="QLabel" name="label_4">
<property name="text">
<string>相机名称:</string>
</property>
</widget>
</item>
<item>
<widget class="QLineEdit" name="camera_name"/>
</item>
<item>
<widget class="QPushButton" name="pushButton">
<property name="text">
<string>截取视频</string>
</property>
</widget>
</item>
<item>
<widget class="QLabel" name="label_5">
<property name="text">
<string>视频路径:</string>
</property>
</widget>
</item>
<item>
<widget class="QLineEdit" name="video_path"/>
</item>
</layout>
</item>
<item>
<widget class="QFrame" name="frame">
<property name="frameShape">
<enum>QFrame::StyledPanel</enum>
</property>
<property name="frameShadow">
<enum>QFrame::Raised</enum>
</property>
<layout class="QHBoxLayout" name="horizontalLayout_2">
<item>
<widget class="QTableWidget" name="tableWidget">
<column>
<property name="text">
<string>设备名称</string>
</property>
</column>
<column>
<property name="text">
<string>当前时间</string>
</property>
</column>
<column>
<property name="text">
<string>车牌号</string>
</property>
</column>
</widget>
</item>
</layout>
</widget>
</item>
</layout>
</widget>
</widget>
</widget>
<resources/>
<connections/>
</ui>
mask.png

757 KB

<RCC>
<qresource prefix="/images"/>
</RCC>
#ifndef XXX_H
#define XXX_H
#endif // XXX_H
yifei.jpg

50.6 KB

Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment