Commit d6c2042f by liusq

非ui的盒子代码

parents
# This file is used to ignore files which are generated
# ----------------------------------------------------------------------------
*~
*.autosave
*.a
*.core
*.moc
*.o
*.obj
*.orig
*.rej
*.so
*.so.*
*_pch.h.cpp
*_resource.rc
*.qm
.#*
*.*#
core
!core/
tags
.DS_Store
.directory
*.debug
Makefile*
*.prl
*.app
moc_*.cpp
ui_*.h
qrc_*.cpp
Thumbs.db
*.res
*.rc
/.qmake.cache
/.qmake.stash
# qtcreator generated files
*.pro.user*
# xemacs temporary files
*.flc
# Vim temporary files
.*.swp
# Visual Studio generated files
*.ib_pdb_index
*.idb
*.ilk
*.pdb
*.sln
*.suo
*.vcproj
*vcproj.*.*.user
*.ncb
*.sdf
*.opensdf
*.vcxproj
*vcxproj.*
# MinGW generated files
*.Debug
*.Release
# Python byte code
*.pyc
# Binaries
# --------
*.dll
*.exe
<RCC>
<qresource prefix="/gamera">
<file>gameras.ini</file>
</qresource>
<qresource prefix="/images">
<file>test_data/0.jpg</file>
<file>test_data/1.jpg</file>
<file>test_data/2.jpg</file>
<file>test_data/fake.jpg</file>
<file>test_data/hasface.jpg</file>
<file>test_data/mask.jpg</file>
<file>test_data/noface.jpg</file>
<file>test_data/nomask.jpg</file>
<file>test_data/p1.jpg</file>
<file>test_data/p2.jpg</file>
<file>test_data/p3.jpg</file>
<file>test_data/real.jpg</file>
<file>test_data/crop.png</file>
<file>yifei.jpg</file>
<file>rgb_fake.jpg</file>
<file>mask.png</file>
<file>Kunkun.jpg</file>
<file>kun.jpg</file>
<file>face_sample.png</file>
<file>face_comp.jpeg</file>
<file>cxk.jpg</file>
<file>crop.png</file>
<file>blur.jpg</file>
</qresource>
<qresource prefix="/model_zip">
<file>T1</file>
<file>T1.index</file>
</qresource>
<qresource prefix="/lprv3u_models">
<file>rpv3_mdict_160h.mnn</file>
<file>litemodel_cls_96xh.mnn</file>
<file>b320_backbone_h.mnn</file>
<file>b320_header_h.mnn</file>
<file>b640x_backbone_h.mnn</file>
<file>b640x_head_h.mnn</file>
</qresource>
</RCC>
#ifndef CAMERAHANDLE_H
#define CAMERAHANDLE_H
#include "RecognitionInfo.h"
#include "FaceRecognition.h"
#include "HttpService.h"
#include "LicensePlateRecognition.h"
#include "Json_Header/AlarmInfo.h"
#include "Json_Header/NetWork_NetCommon.h"
#include "Json_Header/OPTimeSetting.h"
#include "Json_Header/Simplify_Encode.h"
#include "Json_Header/System_TimeZone.h"
#include "Json_Header/RecordCfg.h"
#include "Json_Header/NetWork_SPVMN.h"
#include "mainwindow.h"
#include "ParkingSpaceInfo.h"
#include "so_human_sdk.h"
#include "hyper_lpr_sdk.h"
#include <QPolygon>
#include <QPainterPath>
#include <QProcess>
#include <QDateTime>
#include <QJsonDocument>
#include <memory>
#include <QString>
#include <QObject>
#include <QTimer>
#include <QThreadPool>
#include <QQueue>
#include <opencv2/opencv.hpp>
#include <QSemaphore>
// Vehicle movement classification attached to a recognition event.
enum CAR_INFORMATION {
Exit, // vehicle leaving the lot
Mobilization, // vehicle entering the lot
ExitAndMobilization // both an exit and an entry observed in the same event
};
// Owns the full lifecycle of one IP camera: SDK login, periodic snapshot
// capture, plate/face recognition dispatch, parking-area bookkeeping, and
// device configuration pushes (time zone, recording, encoding, GB28181).
class CameraHandle: public QObject {
Q_OBJECT
public:
CameraHandle(QString &url,QString &httpUrl,QString &sSn, int &channel,const QString &modelPaths,
float carConfidence,int imageSave);
CameraHandle();
~CameraHandle();
// Synchronous SDK login; returns the device handle (negative on failure -- TODO confirm).
int sdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
//int SdkMediaGetFaceImage(int hDevice, int nSeq, int nTimeout);
int sdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener);
int getHdevice();
int getChannel();
void clearCameraHandle();
// void rebindTimer(int hDevice);
// Start the periodic snapshot timer for this device (interval: syn_timer).
void initSdkRealTimeDevSnapSyn(int hDevice,int syn_timer);
void updateImage(const cv::Mat & frame,qint64 currentTime);
void matToBase64(const cv::Mat &image, QByteArray &base64Data);
int callbackFunction(XSDK_HANDLE hObject,QString &szString);
void checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Mat & frame,RecognizedInfo& newInfo,int &result,std::map<int,RecognizedInfo>&exitAndMoMap);
void licensePlateRecognitionResults(vides_data::requestLicensePlate &location);
void sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel);
// Time-zone / system-time configuration.
void sdkDevSystemTimeZoneSyn(QString &time);
// Recording configuration (JSON payload).
void sdkRecordCfg(const char * recordJson);
// Encoder configuration (JSON payload).
void sdkEncodeCfg(const char *enCode);
// GB28181 (SPVMN) configuration update.
void sdkDevSpvMn(const char* spvMn);
void sdkDownloadFileByTime(XSDK_HANDLE hDevice,int id,
QString startTimer,QString endTime);
void setTimeoutMs(int timeoutMs);
bool acquireAndReleaseWithTimeout(bool flag);
QString getSSn();
int getMediaHandle();
void setMediaHandle(int mediaHandle);
void setCurrentFace(int currentFace);
void initParkingSpaceInfo(const std::list<vides_data::responseArea>&areas);
bool compareLists(const std::list<vides_data::responseArea>& newAreas);
void updateParkMapAndParkingSpaceInfos(const std::list<vides_data::responseArea>&newAreas);
std::map<int, vides_data::responseRecognitionData>&getVideoCurrentData();
std::map<QString, QString>&getCurrentData();
// Whether two parking-space polygons overlap.
bool polygonsOverlap(ParkingSpaceInfo &poly1, ParkingSpaceInfo &poly2);
// Intersection area of two polygons.
double calculateIntersectionArea(const QPolygonF &polygon1, const QPolygonF &polygon2);
double ccw(const QPointF& a, const QPointF& b, const QPointF& c);
void getCurrentFrame(std::vector<uchar> &buffer);
int findPointRegion(ParkingSpaceInfo &prakArea);
int determineArea(ParkingSpaceInfo &prakArea);
signals:
void callbackFrameReady(const cv::Mat &frame, const QString &url);
void afterDownloadFile( int id,int recognitionType,QString ossUrl);
private slots:
void sdkRealTimeDevSnapSyn(int hDevice);
void pushRecordToCloud(int id,int recognitionType,QString ossUrl);
void releaseSemaphore();
private :
int hDevice;
int channel;
QString httpUrl;
SXSDKLoginParam *loginParam;
SXMediaFaceImageReq *sxMediaFaceImageReq;
std::mutex plateMutex;
std::mutex faceMutex;
QString sSn;
QString url;
std::map<int, vides_data::responseRecognitionData> videoCurrentData;
std::map<QString, QString> currentData;
// One ParkingSpaceInfo per monitored area id.
std::map<int,ParkingSpaceInfo*>parkMap;
// All parking-space areas watched by this camera.
std::vector<ParkingSpaceInfo*>parkingSpaceInfos;
// Current number of detected faces.
int currentFace;
int mediaHandle;
// Snapshot timer (grabs one frame every 2 seconds).
QTimer *dev_snap_syn_timer;
QTimer *release_timer;
int offlineCount=0;
TCV_HumanDetector *detector;
P_HLPR_Context ctx ;
QSemaphore semaphore;
int timeoutMs;
int image_save;
};
#endif // CAMERAHANDLE_H
#include "CameraRealThread.h"
#include <QDebug>
#include <QProcess>
extern "C" {
#include <libavformat/avformat.h>
#include <libavcodec/avcodec.h>
#include <libavutil/avutil.h>
#include <libavutil/imgutils.h>
#include <libswscale/swscale.h>
}
// Stores the source (RTSP) and destination (RTMP) URLs and initializes
// libavformat's network layer. run() calls avformat_network_init() again;
// presumably the double init is harmless -- TODO confirm for the linked FFmpeg.
CameraRealThread::CameraRealThread(const QString& rtspUrl, const QString& rtmpUrl) : m_rtspUrl(rtspUrl), m_rtmpUrl(rtmpUrl) {
avformat_network_init();
}
// Network deinit is intentionally disabled here; run() performs its own
// avformat_network_deinit() before returning.
CameraRealThread::~CameraRealThread(){
// avformat_network_deinit();
}
void CameraRealThread::run(){
    // Relay the RTSP input to the output URL by remuxing packets (no
    // re-encode): open input -> mirror stream parameters -> copy packets
    // until EOF or a write error.
    // Fixes over the previous version: every early-exit path now releases
    // the AVFormatContexts (they used to leak), and a packet whose write
    // fails is unreffed before leaving the loop.
    QByteArray rtByteArray = m_rtspUrl.toUtf8();
    const char* rt_url = rtByteArray.data();
    QByteArray rmByteArray = m_rtmpUrl.toUtf8();
    // NOTE(review): the output URL is hard-coded for debugging and
    // m_rtmpUrl is ignored -- restore rmByteArray.data() before release.
    const char* rm_url ="flv://rtmp://push.sydip.com/live/44ad6edc63896566?auth_key=1707189971-0-0-dbac3b3c1f29652323355096908b2a62" ;//rmByteArray.data();
    qDebug() << "rm_url"<<rm_url;

    // Diagnostic only: report whether the local ffmpeg binary muxes FLV.
    QProcess process;
    process.start("ffmpeg -formats | grep flv");
    process.waitForFinished();
    QString output = process.readAllStandardOutput();
    if (output.contains("FLV (Flash Video)")) {
        qDebug() << "FFmpeg支持FLV格式";
    } else {
        qDebug() << "FFmpeg不支持FLV格式";
    }

    avformat_network_init();

    AVFormatContext *input_ctx = nullptr;
    AVFormatContext *output_ctx = nullptr;

    // Single cleanup path shared by every exit below (success and failure).
    auto cleanup = [&]() {
        if (input_ctx)
            avformat_close_input(&input_ctx);
        if (output_ctx) {
            if (!(output_ctx->oformat->flags & AVFMT_NOFILE) && output_ctx->pb)
                avio_closep(&output_ctx->pb);
            avformat_free_context(output_ctx);
            output_ctx = nullptr;
        }
        avformat_network_deinit();
    };

    int ret;
    // Open and probe the input stream.
    if ((ret = avformat_open_input(&input_ctx, rt_url, nullptr, nullptr)) < 0) {
        cleanup();
        return;
    }
    if ((ret = avformat_find_stream_info(input_ctx, nullptr)) < 0) {
        cleanup();
        return;
    }

    char filename[64] = "test.ts";
    // NOTE(review): the muxer is "mpegts" writing to a local file while
    // rm_url suggests an RTMP/FLV push was intended -- confirm which is wanted.
    avformat_alloc_output_context2(&output_ctx, nullptr, "mpegts", filename);
    if (!output_ctx) {
        qDebug()<< "Cannot create output context";
        cleanup();
        return;
    }

    // Mirror every input stream's codec parameters onto the output.
    for (unsigned int i = 0; i < input_ctx->nb_streams; i++) {
        AVStream *in_stream = input_ctx->streams[i];
        AVStream *out_stream = avformat_new_stream(output_ctx, nullptr);
        if (!out_stream) {
            cleanup();
            return;
        }
        if (avcodec_parameters_copy(out_stream->codecpar, in_stream->codecpar) < 0) {
            cleanup();
            return;
        }
        out_stream->codecpar->codec_tag = 0; // let the muxer choose the tag
    }

    // Open the output byte stream when the muxer needs one.
    if (!(output_ctx->oformat->flags & AVFMT_NOFILE)) {
        if (avio_open(&output_ctx->pb, rm_url, AVIO_FLAG_WRITE) < 0) {
            cleanup();
            return;
        }
    }

    if (avformat_write_header(output_ctx, nullptr) < 0) {
        cleanup();
        return;
    }

    // Main remux loop: read a packet, rescale its timing, write it out.
    AVPacket pkt;
    while (av_read_frame(input_ctx, &pkt) >= 0) {
        AVStream *in_stream = input_ctx->streams[pkt.stream_index];
        AVStream *out_stream = output_ctx->streams[pkt.stream_index];

        pkt.pts = av_rescale_q_rnd(pkt.pts, in_stream->time_base, out_stream->time_base,
                                   (AVRounding)(AV_ROUND_NEAR_INF|AV_ROUND_PASS_MINMAX));
        // Keep dts == pts as the previous version did (assumes the source
        // has no B-frame reordering -- TODO confirm for these cameras).
        pkt.dts = pkt.pts;
        pkt.duration = av_rescale_q(pkt.duration, in_stream->time_base, out_stream->time_base);
        pkt.pos = -1;

        const int werr = av_interleaved_write_frame(output_ctx, &pkt);
        av_packet_unref(&pkt); // always release, even when the write failed
        if (werr < 0)
            break;
    }

    av_write_trailer(output_ctx);
    cleanup();
}
#ifndef CAMERAREALTHREAD_H
#define CAMERAREALTHREAD_H
#include <QThread>
#include <QString>
// Worker thread that relays one RTSP input stream to an output URL by
// remuxing packets with FFmpeg (see CameraRealThread::run()).
class CameraRealThread : public QThread {
Q_OBJECT
public:
CameraRealThread(const QString& rtspUrl, const QString& rtmpUrl);
~CameraRealThread();
protected:
// Thread body: open the RTSP input, copy stream params, push packets out.
void run() override;
private:
QString m_rtspUrl; // source RTSP URL
QString m_rtmpUrl; // destination RTMP URL
};
#endif // CAMERAREALTHREAD_H
//#include "CameraThread.h"
//CameraThread::CameraThread(QObject *parent) : QThread(parent),running(true) {
//}
//CameraThread::~CameraThread() {
// // 清理代码,例如停止线程、清除任务队列等
// running.store( false); // 停止线程
// if (isRunning()) {
// wait(); // 等待线程结束
// }
//}
//void CameraThread::addTask(const Task &task) {
// // 将任务添加到队列中
// std::lock_guard<std::mutex> lock(taskMutex);
// taskQueue.push(std::move(task));
// taskCondition.notify_one(); // 通知线程有新任务
//}
//// 重写QThread的run方法
//void CameraThread::run() {
// while (running) {
// std::unique_lock<std::mutex> lock(taskMutex); // 使用unique_lock管理锁
// taskCondition.wait(lock, [this] { return !taskQueue.empty() || !running; });
// if (!taskQueue.empty()) {
// Task task = taskQueue.front();
// taskQueue.pop();
// lock.unlock(); // 解锁,以便在执行任务时其他线程可以操作队列
// task();// 执行任务
// lock.lock(); // 重新加锁
// }
// }
//}
//#ifndef CAMERATHREAD_H
//#define CAMERATHREAD_H
//#include <functional>
//#include "XSDKPublic.h"
//#include "XNetSDKSyn.h"
//#include "XNetSDKDefine.h"
//#include <queue>
//#include <QThread>
//#include <mutex>
//#include <condition_variable>
//#include <atomic>
//using namespace std;
//class CameraThread : public QThread {
// Q_OBJECT
//public:
// CameraThread(QObject *parent = nullptr);
// ~CameraThread();
// typedef function<void()> Task;
// void addTask(const Task &task);
//protected:
// void run() override;
//private:
// std::queue<Task> taskQueue;
// std::mutex taskMutex;
// std::condition_variable taskCondition;
// std::atomic<bool> running{true};
//};
//#endif // CAMERATHREAD_H
#include "Common.h"
// Private: construction happens only through getInstance().
Common::Common(){}
// Current local time rendered as "yyyy-MM-dd hh:mm:ss".
QString Common::getTimeString() {
    return QDateTime::currentDateTime().toString("yyyy-MM-dd hh:mm:ss");
}
// Convert a Unix timestamp (seconds since epoch) to local
// "yyyy-MM-dd hh:mm:ss" text.
QString Common::timestampToDateString(qint64 timestamp) {
    const QDateTime when = QDateTime::fromSecsSinceEpoch(timestamp);
    return when.toString("yyyy-MM-dd hh:mm:ss");
}
// Compose the OSS "string to sign" and return Base64(HMAC-SHA1(secret, it)).
QString Common::generateSignature(const QString& accessKeySecret, const QString& verb,
                                  const QString& contentMD5, const QString& contentType,
                                  const QString& date, const QString& ossHeaders,
                                  const QString& canonicalizedResource)
{
    // Canonical layout: six components separated by single '\n' characters;
    // canonicalizedResource carries no trailing newline.
    QString stringToSign = verb;
    stringToSign += "\n";
    stringToSign += contentMD5;
    stringToSign += "\n";
    stringToSign += contentType;
    stringToSign += "\n";
    stringToSign += date;
    stringToSign += "\n";
    stringToSign += ossHeaders;
    stringToSign += "\n";
    stringToSign += canonicalizedResource;

    // HMAC-SHA1 keyed with the account secret, then Base64-encoded.
    QMessageAuthenticationCode mac(QCryptographicHash::Sha1);
    mac.setKey(accessKeySecret.toUtf8());
    mac.addData(stringToSign.toUtf8());
    return QString(mac.result().toBase64());
}
// Directory for recorded video output ('/'-terminated once set).
QString Common::getVideoOut(){
return videoOut;
}
// Store the video-output directory; a path separator is appended
// unconditionally (same behaviour as before, even if one is present).
void Common::setVideoOut(QString videoOut){
    this->videoOut = videoOut + "/";
}
// Directory for downloaded video files ('/'-terminated once set).
QString Common::getVideoDownload(){
return videoDownload;
}
// Store the video-download directory; a path separator is appended
// unconditionally (same behaviour as before).
void Common::setVideoDownload(QString videoDownload){
    this->videoDownload = videoDownload + "/";
}
// Directory for image files ('/'-terminated once set).
QString Common::getImages(){
return images;
}
// Store the images directory; a path separator is appended unconditionally
// (same behaviour as before).
void Common::setImages(QString images){
    this->images = images + "/";
}
// First non-loopback IPv4 address of this host; falls back to 127.0.0.1
// when no interface qualifies.
QString Common::GetLocalIp() {
    const QList<QHostAddress> candidates = QNetworkInterface::allAddresses();
    for (const QHostAddress& addr : candidates) {
        const bool isIPv4 = addr.toIPv4Address() != 0;
        if (isIPv4 && addr != QHostAddress::LocalHost) {
            return addr.toString();
        }
    }
    return QHostAddress(QHostAddress::LocalHost).toString();
}
// Private: destroyed with the function-local static inside getInstance().
Common::~Common(){}
#ifndef COMMON_H
#define COMMON_H
#include <ctime>
#include <chrono>
#include <thread>
#include <QString>
#include <cstring>
#include <QDateTime>
#include <QByteArray>
#include <QCryptographicHash>
#include <QMessageAuthenticationCode>
#include <QNetworkInterface>
#include <QDebug>
// Process-wide singleton for shared configuration (output directories),
// time formatting, OSS request signing and small pointer utilities.
class Common
{
public:
Common(const Common &)=delete;
void operator=(const Common &)=delete;
// Meyers singleton: thread-safe lazy construction (C++11 magic static).
static Common &getInstance(){
static Common instance;
return instance;
}
// Current local time as "yyyy-MM-dd hh:mm:ss".
QString getTimeString();
// Unix seconds -> "yyyy-MM-dd hh:mm:ss".
QString timestampToDateString(qint64 timestamp);
// Base64(HMAC-SHA1) signature for an OSS-style request.
QString generateSignature(const QString& accessKeySecret, const QString& verb,
const QString& contentMD5, const QString& contentType,
const QString& date, const QString& ossHeaders,
const QString& canonicalizedResource);
QString getVideoOut();
void setVideoOut(QString videoOut);
QString getVideoDownload();
void setVideoDownload(QString videoDownload);
QString getImages();
void setImages(QString images);
// First non-loopback IPv4 address, or 127.0.0.1 when none is found.
QString GetLocalIp();
// Delete obj and null the caller's pointer; safe to call with nullptr.
template<typename T>
void deleteObj(T*& obj) {
if(obj != nullptr) {
delete obj;
obj = nullptr;
}
}
private:
QString videoOut;
QString videoDownload;
QString images;
Common();
~Common();
};
#endif // COMMON_H
//#include "FaceDetectionParkingPush.h"
//FaceDetectionParkingPush::FaceDetectionParkingPush(){}
//FaceDetectionParkingPush::FaceDetectionParkingPush(QString &url, QString &httpUrl, QString &sSn, QTimer* devSnapSynTimer, int &channel)
// : hDevice(-1),
// currentConStatus(false),
// loginParam(new SXSDKLoginParam()),
// sxMediaFaceImageReq(new SXMediaFaceImageReq()),
// channel(channel),
// dev_snap_syn_timer(new QTimer(this)) {
//}
//FaceDetectionParkingPush::~FaceDetectionParkingPush() {
// Common & instace= Common::getInstance();
// instace.deleteObj(dev_snap_syn_timer);
// instace.deleteObj(loginParam);
// instace.deleteObj(sxMediaFaceImageReq);
// QThreadPool::globalInstance()->waitForDone();
//}
//int FaceDetectionParkingPush::SdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout) {
// QByteArray byteArray = sDevId.toUtf8();
// char* cDevid=byteArray.data();
// strcpy(loginParam->sDevId, cDevid);
// loginParam->nDevPort=nDevPort;
// QByteArray byteName = sUserName.toUtf8();
// char* cName=byteName.data();
// strcpy(loginParam->sUserName, cName);
// if(sPassword.length()>0){
// QByteArray bytePassword = sPassword.toUtf8();
// strcpy(loginParam->sPassword, bytePassword.constData());
// }else{
// strcpy(loginParam->sPassword, "");
// }
// loginParam->nCnnType=EDEV_CNN_TYPE_AUTO;
// int loginResult =XSDK_DevLoginSyn(loginParam,nTimeout);
// if(loginResult<0){
// qInfo() << "登录设备失败";
// return loginResult;
// }
// return loginResult;
//}
//int FaceDetectionParkingPush::SdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener) {
// return XSDK_DevSetAlarmListener(hDevice,bListener);
//}
//int FaceDetectionParkingPush::getChannel(){
// return channel;
//}
//int FaceDetectionParkingPush::getHdevice() {
// return hDevice;
//}
//bool FaceDetectionParkingPush::getConnectionStatus() {
// return currentConStatus;
//}
//void FaceDetectionParkingPush::initSdkRealTimeDevSnapSyn(int hDevice) {
// connect(dev_snap_syn_timer, &QTimer::timeout, this, [this,hDevice]() {
// this->SdkRealTimeDevSnapSyn(hDevice);
// }, Qt::QueuedConnection);
// dev_snap_syn_timer->start(2000);
//}
//void FaceDetectionParkingPush::SdkRealTimeDevSnapSyn(int hDevice) {
// QThreadPool* threadPool = QThreadPool::globalInstance();
// threadPool->setMaxThreadCount(6);
// if (!task) { // 检查是否已经创建了TaskRunnable
// task = new TaskRunnable(this, hDevice, this->channel,TaskRunnable::SdkDevSnapSyn);
// task->setAutoDelete(false); // 确保task不会在执行后被自动删除
// }else {
// task->setHdevice(hDevice);
// task->setChannel(this->channel);
// task->setCallbackFunction(TaskRunnable::SdkDevSnapSyn);
// }
// if (!threadPool->tryStart(task)) { // 尝试启动任务,如果线程池满了则不会启动
// qDebug() << "线程池已满,无法启动TaskRunnable";
// }
//}
//TaskRunnable*FaceDetectionParkingPush::getTask(){
// return task;
//}
//void FaceDetectionParkingPush::setTask(TaskRunnable* newTask){
// this->task=newTask;
//}
//int FaceDetectionParkingPush::CallbackFunction(XSDK_HANDLE hObject, QString &szString) {
// QByteArray && byJson = szString.toLocal8Bit();
// const char * cJson= byJson.data();
// XSDK_CFG::AlarmInfo alarmInfo;
// if (0 == alarmInfo.Parse(cJson))
// {
// const char* buf = alarmInfo.Event.ToString();
// qInfo() << "buf:"<<buf;
// qInfo() << "OnDevAlarmCallback[Dev:" << hObject << "]"
// << "\r\nEvent:" << alarmInfo.Event.Value()
// << "\r\nChannel:" << alarmInfo.Channel.Value()
// << "\r\nStartTime:" << alarmInfo.StartTime.Value()
// << "\r\nStatus:" << alarmInfo.Status.Value();
// }
// else
// {
// qDebug() << "OnDevAlarmCallback[Dev:" << hObject << "][Event:" << szString << "]";
// }
// cv::Mat image;
// MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
// mediaFaceImage->FaceImageCallBack(hObject,sxMediaFaceImageReq->nChannel,image);
// if (image.empty())
// {
// qInfo() << "Failed to read the image";
// return -1;
// }
// this->updateImage(image, url);
//}
//void FaceDetectionParkingPush::updateImage(const cv::Mat & frame,const QString &respUrl){
// FaceReconition &faceRecognition = FaceReconition::getInstance();
// LicensePlateRecognition &licensePlateRecogn =LicensePlateRecognition::getInstance();
// //faceRecognition.search(frame,imageHandleList,names);
// if(faceRecognition.doesItExistEmployee(frame)){
// }
// QString lpNumber;
// licensePlateRecogn.licensePlateNumber(frame, lpNumber);
// std::lock_guard<std::mutex> guard(queueMutex);
// if (queuels.size() >= 10) {
// queuels.dequeue();
// }
// RecognizedInfo recognizedInfo(lpNumber,QDateTime::currentMSecsSinceEpoch());
// queuels.enqueue(recognizedInfo);
// this->CheckAndUpdateCurrentPlate(recognizedInfo);
// if(lpNumber.length()>0){
// qDebug()<<QString("识别的车牌号是:%1").arg(lpNumber);
// }else {
// qDebug()<<QString("当前这帧图像未识别车牌");
// }
//}
//void FaceDetectionParkingPush::CheckAndUpdateCurrentPlate( RecognizedInfo& newInfo){
// std::lock_guard<std::mutex> guard(plateMutex);
// if (newInfo.getLicensePlate() != currentPlate.getLicensePlate()) {
// int count = 0;
// for (auto& info : queuels) {
// if (info.getLicensePlate() == newInfo.getLicensePlate()) {
// count++;
// }
// }
// //第一次进场 当前车牌就是进来这个,老车牌就是空
// //出场的时候 当前车牌是空, 老车牌是出厂车牌
// if (count >= 3) {
// if(currentPlate.getLicensePlate().length()<=0){
// qInfo()<<"未出场车:"<<currentPlate.getLicensePlate()<<"进场的车牌号:"<<newInfo.getLicensePlate();
// }else {
// qInfo()<<"出场车牌号:"<<currentPlate.getLicensePlate()<<"进场的车牌号:"<<newInfo.getLicensePlate();
// }
// XSDK_HANDLE h_device=getHdevice();
// QMetaObject::invokeMethod(dev_snap_syn_timer, "stop", Qt::QueuedConnection);
// QObject::disconnect(dev_snap_syn_timer, &QTimer::timeout, MainWindow::sp_this, nullptr);
// QObject::connect(dev_snap_syn_timer, &QTimer::timeout, MainWindow::sp_this, [this, h_device]() {
// this->SdkRealTimeDevSnapSyn(h_device);
// }, Qt::QueuedConnection);
// QMetaObject::invokeMethod(dev_snap_syn_timer, "start", Qt::QueuedConnection, Q_ARG(int, 15000));
// currentPlate = newInfo;
// }
// }
//}
//void FaceDetectionParkingPush::SdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
// if(hDevice<=0){
// qInfo() << "相机断线";
// return;
// }
// cv::Mat image;
// MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
// int ret=mediaFaceImage->FaceImageCallBack(hDevice,nChannel, image);
// Common & instace= Common::getInstance();
// vides_data::requestDeviceStatus reStatus;
// reStatus.sSn=sSn;
// if(ret>0){
// if(!currentConStatus){
// reStatus.status=1;
// reStatus.type=2;
// HttpService httpService(httpUrl);
// vides_data::response*re= httpService.httpPostDeviceStatus(reStatus);
// if(re->code!=0){
// qInfo()<<"请求设备状态失败";
// }
// instace.deleteObj(re);
// }
// currentConStatus=true;
// }else{
// if(currentConStatus){
// reStatus.status=0;
// reStatus.type=2;
// HttpService httpService(httpUrl);
// vides_data::response*re= httpService.httpPostDeviceStatus(reStatus);
// if(re->code!=0){
// qInfo()<<"请求设备状态失败";
// }
// instace.deleteObj(re);
// }
// currentConStatus=false;
// }
// if (image.empty())
// {
// qInfo() << "Failed to read the image";
// return;
// }
// this->updateImage(image, url);
//}
//#ifndef FACEDETECTIONPARKINGPUSH_H
//#define FACEDETECTIONPARKINGPUSH_H
//#include "RecognitionInfo.h"
//#include "FaceRecognition.h"
//#include "HttpService.h"
//#include "LicensePlateRecognition.h"
//#include "Json_Header/AlarmInfo.h"
//#include "TaskRunnable.h"
//#include "mainwindow.h"
//#include <QDateTime>
//#include <QJsonDocument>
//#include "Common.h"
//#include <QString>
//#include <QObject>
//#include <QTimer>
//#include <QThreadPool>
//#include <QQueue>
//#include <opencv2/opencv.hpp>
//class FaceDetectionParkingPush : public QObject {
// Q_OBJECT
//public:
// FaceDetectionParkingPush(QString &url,QString &httpUrl,QString &sSn,QTimer* devSnapSynTimer, int &channel);
// ~FaceDetectionParkingPush();
// FaceDetectionParkingPush();
// int SdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
// //int SdkMediaGetFaceImage(int hDevice, int nSeq, int nTimeout);
// int SdkDevSetAlarmListener(XSDK_HANDLE hDevice, int bListener);
// int getHdevice();
// int getChannel();
// bool getConnectionStatus();
// void initSdkRealTimeDevSnapSyn(int hDevice);
// void updateImage(const cv::Mat & frame,const QString &respUrl);
// int CallbackFunction(XSDK_HANDLE hObject,QString &szString);
// void CheckAndUpdateCurrentPlate( RecognizedInfo& newInfo);
// void SdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel);
// TaskRunnable*getTask();
// void setTask(TaskRunnable* newTask);
//signals:
// void CallbackFrameReady(const cv::Mat &frame, const QString &url);
//public slots:
// void SdkRealTimeDevSnapSyn(int hDevice);
//private:
// TaskRunnable* task = nullptr;
// int hDevice;
// int channel;
// SXSDKLoginParam *loginParam;
// SXMediaFaceImageReq *sxMediaFaceImageReq;
// std::mutex queueMutex;
// std::mutex plateMutex;
// std::mutex taskThread;
// QQueue<RecognizedInfo> queuels;
// RecognizedInfo currentPlate;
// QString sSn;
// QString url;
// volatile bool currentConStatus=false;
// //2秒钟抓一次图
// QTimer *dev_snap_syn_timer;
//};
//#endif // FACEDETECTIONPARKINGPUSH_H
//#ifndef FACEDETECTIONPARKINGPUSHIMPL_H
//#define FACEDETECTIONPARKINGPUSHIMPL_H
//#include "XSDKPublic.h"
//#include "FaceDetectionParkingPush.h"
//#include "XNetSDKDefine.h"
//#include "Common.h"
//#include "CameraThread.h"
//#include "MediaFaceImage.h"
//class FaceDetectionParkingPushImpl {
//public:
// FaceDetectionParkingPushImpl(FaceDetectionParkingPush* parent,QString &framePath, QString &url);
// int SdkInit(QString &szConfigPath, QString &szTempPath);
// XSDK_HANDLE SdkDevLoginSyn(QString sDevId, int nDevPort, QString sUserName, QString sPassword, int nTimeout);
// XSDK_HANDLE SdkMediaGetFaceImage(XSDK_HANDLE hDevice, int nSeq, int nTimeout);
// int callbackFunction(XSDK_HANDLE hObject, int nMsgId, int nParam1, int nParam2, int nParam3, const char* szString, void* pObject, int64 lParam, int nSeq, void* pUserData, void* pMsg);
// CameraThread *getCameraThread();
//private:
// SXSDKInitParam *pParam;
// SXSDKLoginParam *loginParam;
// SXMediaFaceImageReq *sxMediaFaceImageReq;
// CameraThread *cameraThread;
// QString framePath;
// QString url;
// FaceDetectionParkingPush* parent;
//};
//#endif // FACEDETECTIONPARKINGPUSHIMPL_H
#ifndef FACERECOGNITION_H
#define FACERECOGNITION_H
#include "hyperface.h"
#include <opencv2/opencv.hpp>
#include<QCoreApplication>
#include "herror.h"
#include "LogHandle.h"
#include "VidesData.h"
#include <mutex>
// Singleton wrapper around the HyperFace SDK: holds one face context, the
// set of registered feature ids (customIds) and the match threshold.
// (Class name keeps the historical "Reconition" spelling used by callers.)
class FaceReconition
{
private:
// NOTE(review): raw-pointer slot unused by getInstance() below, which
// returns a function-local static -- candidate for removal; verify no
// external references first.
static FaceReconition* instance;
HContextHandle ctxHandle;
float configConfidence;
std::vector<int32_t>customIds;
FaceReconition();
~FaceReconition();
public:
static FaceReconition& getInstance()
{
static FaceReconition instance;
return instance;
}
// Detect faces in `source` and append matches above the configured
// threshold (tag + bounding box) to `face`.
void doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&face);
// Load labelled images (tag -> path), extract features, build the index.
void initSourceImageMap(std::map<QString,QString>&maps,float confidence);
// Remove all registered features and release the face context.
int featureRemove();
};
#endif // FACERECOGNITION_H
#include "FaceRecognition.h"
// Start with an empty SDK context; the model is loaded lazily by
// initSourceImageMap(). Previously ctxHandle was left uninitialized, so the
// destructor's null-check read an indeterminate value (undefined behaviour).
FaceReconition::FaceReconition() : ctxHandle(nullptr), configConfidence(0.0f) {}
// Release the SDK face context if one is still held.
FaceReconition::~FaceReconition(){
if (ctxHandle != nullptr) {
HF_ReleaseFaceContext(ctxHandle);
ctxHandle = nullptr;
}
}
// Definition of the legacy raw-pointer slot; getInstance() never uses it
// (it returns a function-local static) -- candidate for removal.
FaceReconition* FaceReconition::instance = nullptr;
void FaceReconition::initSourceImageMap(std::map<QString,QString>&maps,float confidence){
featureRemove();
HResult ret;
// 初始化context
#if defined(__arm__) || defined(__ARM_ARCH)
// ARM 平台相关的代码
QString bPath = QCoreApplication::applicationDirPath() + "/model_zip/N1su_5";
#elif defined(__i386__) || defined(__x86_64__)
QString bPath = QCoreApplication::applicationDirPath() + "/model_zip/T1_5";
#else
#error "不支持的架构"
#endif
QByteArray && bypath = bPath.toUtf8();
char* spath = bypath.data();
HString path = spath;
HInt32 option = HF_ENABLE_QUALITY | HF_ENABLE_FACE_RECOGNITION | HF_ENABLE_MASK_DETECT;
HF_DetectMode detMode = HF_DETECT_MODE_IMAGE; // 选择图像模式 即总是检测
// 创建ctx
ret = HF_CreateFaceContextFromResourceFileOptional(path, option, detMode, 5, &ctxHandle);
if (ret != HSUCCEED) {
qInfo() << QString("Create ctx error: %1").arg(ret);
return;
}
// ret = HF_FaceRecognitionThresholdSetting(ctxHandle, 0.36);
// if (ret != HSUCCEED) {
// qInfo() << QString("HF_FaceRecognitionThresholdSetting error: %1").arg(ret);
// return;
// }
customIds.clear();
int i = 0;
for (auto it = maps.begin(); it != maps.end(); ++it,++i) {
const QString& key = it->first;
const QString& value = it->second;
cv::Mat image = cv::imread(value.toStdString());
if (image.empty()) {
qInfo()<< "错误:图像为空或路径不正确,无法加载图像 ";
return;
}
HF_ImageData imageData = {0};
imageData.data = image.data;
imageData.height = image.rows;
imageData.width = image.cols;
imageData.rotation = VIEW_ROTATION_0;
imageData.format = FORMAT_BGR;
HImageHandle imageSteamHandle;
ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
this->configConfidence=confidence;
if (ret != HSUCCEED) {
qInfo() << QString("image handle error: %1").arg((long)imageSteamHandle);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
HF_MultipleFaceData multipleFaceData = {0};
HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
if (multipleFaceData.detectedNum <= 0) {
qInfo() << QString("initSourceImageMap:未检测到人脸: %1").arg(key);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
HF_FaceFeature feature = {0};
ret = HF_FaceFeatureExtract(ctxHandle, imageSteamHandle, multipleFaceData.tokens[0], &feature);
if (ret != HSUCCEED) {
qInfo() << QString("特征提取出错: %1").arg(ret);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
char* tagName = new char[key.size() + 1];
std::strcpy(tagName, key.toStdString().c_str());
HF_FaceFeatureIdentity identity = {0};
identity.feature = &feature;
identity.customId = i;
customIds.push_back( identity.customId);
identity.tag = tagName;
ret = HF_FeaturesGroupInsertFeature(ctxHandle, identity);
if (ret != HSUCCEED) {
qInfo() << QString("插入失败: %1").arg(ret);
HF_ReleaseImageStream(imageSteamHandle); // 释放资源
return;
}
delete[] tagName;
ret = HF_ReleaseImageStream(imageSteamHandle);
if (ret == HSUCCEED) {
imageSteamHandle = nullptr;
qInfo() << QString("mage released");
} else {
qInfo() << QString("image release error: %l").arg(ret);
}
}
}
int FaceReconition::featureRemove(){
if(customIds.size()>0){
for(auto customId:customIds){
HResult ret= HF_FeaturesGroupFeatureRemove(ctxHandle,customId);
qDebug()<<"ret:featureRemove "<<ret;
}
HF_ReleaseFaceContext(ctxHandle);
}
}
void FaceReconition::doesItExistEmployee(const cv::Mat &source,std::list<vides_data::faceRecognitionResult>&faces){
    // Detect faces in `source`, search each against the registered feature
    // group, run the liveness/mask pipeline, and append every match whose
    // confidence exceeds configConfidence (tag + bounding box) to `faces`.
    // Fixes over the previous version: the image stream handle leaked on
    // every early return, and the liveness log used the invalid QString
    // format character 'Q' (now 'f').
    HResult ret;
    HF_ContextCustomParameter parameter = {0};
    HF_ImageData imageData = {0};
    imageData.data = source.data;
    imageData.height = source.rows;
    imageData.width = source.cols;
    imageData.rotation = VIEW_ROTATION_0;
    imageData.format = FORMAT_BGR;
    HImageHandle imageSteamHandle;
    ret = HF_CreateImageStream(&imageData, &imageSteamHandle);
    if (ret != HSUCCEED) {
        qInfo()<<QString("image handle error:%1").arg((long) imageSteamHandle,0,10);
        return ;
    }
    // Single release path used by every exit beyond this point.
    auto releaseStream = [&]() {
        HResult rel = HF_ReleaseImageStream(imageSteamHandle);
        if (rel == HSUCCEED) {
            imageSteamHandle = nullptr;
        } else {
            qInfo()<<QString("image release error: %1").arg(rel);
        }
    };
    HF_MultipleFaceData multipleFaceData = {0};
    HF_FaceContextRunFaceTrack(ctxHandle, imageSteamHandle, &multipleFaceData);
    if (multipleFaceData.detectedNum <= 0) {
        qDebug()<<QString("search 未检测到人脸");
        releaseStream();
        return ;
    }
    // Copy-style extraction: one float vector per detected face.
    std::vector<std::vector<float>> features;
    HInt32 featureNum;
    HF_GetFeatureLength(ctxHandle, &featureNum);
    for (int j = 0; j < multipleFaceData.detectedNum; ++j) {
        std::vector<float> newfeature(featureNum, 0.0f);
        ret = HF_FaceFeatureExtractCpy(ctxHandle, imageSteamHandle, multipleFaceData.tokens[j], newfeature.data());
        if (ret != HSUCCEED) {
            qDebug()<<QString("特征提取出错: %1").arg(ret);
            releaseStream();
            return ;
        }
        features.push_back(std::move(newfeature));
    }
    int rect = 0;
    for (auto &feat : features) {
        HF_FaceFeature feature;
        feature.size = feat.size();
        feature.data = feat.data();
        HF_FaceFeatureIdentity searchIdentity = {0};
        HFloat confidence;
        ret = HF_FeaturesGroupFeatureSearch(ctxHandle, feature, &confidence, &searchIdentity);
        if (ret != HSUCCEED) {
            qInfo()<<QString("搜索失败: %1").arg(ret);
            releaseStream();
            return ;
        }
        qDebug()<<QString("搜索置信度: %1").arg(confidence);
        qDebug()<<QString("匹配到的tag: %1").arg(searchIdentity.tag);
        qDebug()<<QString("匹配到的customId: %1").arg(searchIdentity.customId);
        // Liveness / mask pipeline for the tracked faces of this frame.
        ret = HF_MultipleFacePipelineProcess(ctxHandle, imageSteamHandle, &multipleFaceData, parameter);
        if (ret != HSUCCEED) {
            qInfo()<<QString("pipeline执行失败: %1").arg(ret);
            releaseStream();
            return ;
        }
        HF_RGBLivenessConfidence livenessConfidence = {0};
        ret = HF_GetRGBLivenessConfidence(ctxHandle, &livenessConfidence);
        if (ret != HSUCCEED) {
            qInfo()<<QString("获取活体数据失败1");
            releaseStream();
            return ;
        }
        qDebug()<<QString("活体置信度====>:%1").arg(livenessConfidence.confidence[0],0,'f',4);
        HF_FaceMaskConfidence maskConfidence = {0};
        ret = HF_GetFaceMaskConfidence(ctxHandle, &maskConfidence);
        if (ret != HSUCCEED) {
            qInfo()<<QString("获口罩数据失败");
            releaseStream();
            return ;
        }
        HInt32 faceNum;
        ret = HF_FeatureGroupGetCount(ctxHandle, &faceNum);
        if (ret != HSUCCEED) {
            qInfo()<<QString("获取失败");
            releaseStream();
            return ;
        }
        // Keep only matches above the configured threshold.
        if (confidence > configConfidence) {
            vides_data::faceRecognitionResult newface;
            newface.id = searchIdentity.tag;
            newface.x = multipleFaceData.rects[rect].x;
            newface.y = multipleFaceData.rects[rect].y;
            newface.width = multipleFaceData.rects[rect].width;
            newface.height = multipleFaceData.rects[rect].height;
            faces.push_back(newface);
        }
        rect++;
    }
    releaseStream();
}
#include "HandleRtsp.h"
/// Worker-thread constructor.
/// @param respUrl   RTSP stream URL to pull from.
/// @param semaphore shared semaphore (held by reference; must outlive this object).
/// @param parent    optional QObject parent.
/// Fix: isFrameCounter was never initialized, so getFrameCounterStatus()
/// returned an indeterminate value until something assigned it.
HandleRtsp::HandleRtsp(const QString & respUrl,QSemaphore& semaphore,QObject *parent )
    :QThread(parent),respUrl(respUrl),semaphore(semaphore),stopFlag(false)
{
    isFrameCounter = false;      // previously left uninitialized
    frameCounter.store(63);      // default sampling interval: emit every 63rd frame
}
// Destructor: request the worker loop to stop, then block until run() returns.
// NOTE(review): wait() has no timeout here — if run() is blocked inside
// cvCapture.read() on a stalled stream, destruction can hang; confirm the
// capture backend honors interruption before relying on this in shutdown paths.
HandleRtsp::~HandleRtsp(){
qInfo()<<QString("HandleResp %1 stopFlag").arg(respUrl);
stopFlag=true;
wait();
}
void HandleRtsp::setFrameCounter(int frameCounter){
this->frameCounter.store(frameCounter);
}
// Returns the isFrameCounter flag.
// NOTE(review): nothing in this file ever assigns isFrameCounter — verify a
// caller sets it, otherwise this returns whatever the member was initialized to.
bool HandleRtsp::getFrameCounterStatus(){
return isFrameCounter;
}
//void HandleRtsp::run(){
// semaphore.acquire();
// std::string r_url = respUrl.toStdString();
// cv::VideoCapture cvCapture;
// cvCapture.set(cv::CAP_PROP_FPS, 30);
// int durationInSeconds = 600; // 视频片段时长为10分钟
// qDebug() << QString::fromStdString(r_url);;
// if(!cvCapture.open(r_url, cv::CAP_FFMPEG)){
// emit streamInterrupted(respUrl);
// return;
// }
// int frameCount;
// cv::Mat frame;
// cv::Size frameSize(640, 480);
// std::time_t t = std::time(nullptr);
// Common & instace= Common::getInstance();
// QString video= instace.getVideoOut();
// video.append("/").append(instace.getTimeString(t)).append(".mkv");
// std::string outputFileName =video.toStdString();
// qDebug()<<QString::fromStdString(outputFileName);
// int frame_width = static_cast<int>(cvCapture.get(cv::CAP_PROP_FRAME_WIDTH));
// int frame_height = static_cast<int>(cvCapture.get(cv::CAP_PROP_FRAME_HEIGHT));
// double fps = 30.0; // 可以从cap.get(cv::CAP_PROP_FPS)获取摄像头的帧率
// // int fourcc = cv::VideoWriter::fourcc('M', 'J', 'P', 'G'); // 使用H.264编码器
// cv::VideoWriter writer(outputFileName, cv::VideoWriter::fourcc('H', '2', '6', '4'), fps, cv::Size(frame_width, frame_height), true);
// while (!stopFlag) {
// if(!cvCapture.read(frame)){
// qInfo()<<"emit streamInterrupted(respUrl)";
// emit streamInterrupted(respUrl);
// break;
// }
// if(!frame.empty()){
// writer.write(frame); // 将帧写入视频文件
// //默认64
// if((frameCount && frameCounter.load())==0){
// cv::Mat frame_bgr;
// cv::cvtColor(frame, frame_bgr, cv::COLOR_RGB2BGR);
// emit frameReady(frame_bgr, respUrl);
// }
// frameCount++;
// }
// }
// cvCapture.release();
//}
/// Worker loop: pulls frames from the RTSP stream, records them to 10-minute
/// MKV segments, and emits every Nth frame (N = frameCounter) for analysis.
/// Emits streamInterrupted(respUrl) when the stream cannot be opened or read.
///
/// Fixes over the previous version:
///  - frameCount was read uninitialized (UB in the modulo expression);
///  - the inner `while (true)` never read a new frame, so the same frame was
///    re-written in a tight loop for the whole 10-minute segment;
///  - the single VideoWriter was released at the end of a segment but reused
///    (same object, same filename) by the next segment; now each segment gets
///    a fresh writer and a fresh timestamped filename;
///  - frameCounter == 0 caused division by zero in `frameCount % interval`;
///  - fps reported as 0 by some streams broke the writer; fall back to 30.
void HandleRtsp::run() {
    cv::VideoCapture cvCapture;
    cvCapture.set(cv::CAP_PROP_FPS, 30);
    std::string r_url = respUrl.toStdString();
    Common &instance = Common::getInstance();
    if (!cvCapture.open(r_url, cv::CAP_FFMPEG)) {
        emit streamInterrupted(respUrl);
        return;
    }
    int frame_width = static_cast<int>(cvCapture.get(cv::CAP_PROP_FRAME_WIDTH));
    int frame_height = static_cast<int>(cvCapture.get(cv::CAP_PROP_FRAME_HEIGHT));
    double fps = cvCapture.get(cv::CAP_PROP_FPS);
    if (fps <= 0.0) {
        fps = 30.0; // some RTSP sources report 0
    }
    int frameCount = 0;
    cv::Mat frame;
    while (!stopFlag) {
        // Start a new 10-minute segment with a fresh timestamped filename.
        std::time_t t = std::time(nullptr);
        QString video = instance.getVideoOut();
        video.append("/").append(instance.getTimeString(t)).append(".mkv");
        std::string outputFileName = video.toStdString();
        qDebug() << QString::fromStdString(outputFileName);
        cv::VideoWriter writer(outputFileName, cv::VideoWriter::fourcc('H', '2', '6', '4'),
                               fps, cv::Size(frame_width, frame_height), true);
        auto start = QDateTime::currentDateTime();
        while (!stopFlag) {
            if (!cvCapture.read(frame)) {
                qInfo() << "emit streamInterrupted(respUrl)";
                emit streamInterrupted(respUrl);
                writer.release();
                cvCapture.release();
                return;
            }
            if (!frame.empty()) {
                writer.write(frame); // append frame to the current segment
                int interval = frameCounter.load();
                // Guard against interval == 0 (would be a division by zero).
                if (interval > 0 && (frameCount % interval) == 0) {
                    cv::Mat frame_bgr;
                    cv::cvtColor(frame, frame_bgr, cv::COLOR_RGB2BGR);
                    emit frameReady(frame_bgr, respUrl);
                }
                frameCount++;
            }
            // Roll over to a new file every 600 seconds.
            if (start.secsTo(QDateTime::currentDateTime()) >= 600) {
                break;
            }
        }
        writer.release();
    }
    cvCapture.release(); // release the stream handle
}
#ifndef HANDLERTSP_H
#define HANDLERTSP_H
#include "LogHandle.h"
#include "FaceRecognition.h"
#include "hyper_lpr_sdk.h"
#include "Common.h"
#include <HandleRtsp.h>
#include <QDebug>
#include <QString>
#include <QThread>
#include <ctime>
#include <QSemaphore>
#include <opencv2/opencv.hpp>
#include <QtCore/QAtomicInteger>
// Worker thread that pulls frames from an RTSP stream, records them into
// segmented video files, and periodically emits frames for downstream analysis.
class HandleRtsp:public QThread
{
Q_OBJECT
public:
// respUrl: RTSP URL; semaphore: shared throttle held by reference (must outlive this object).
HandleRtsp(const QString & respUrl,QSemaphore& semaphore,QObject *parent=nullptr);
// Thread entry point: open stream, record segments, emit sampled frames.
void run () override;
// Atomically change the frame-sampling interval used by run().
void setFrameCounter(int frameCounter);
bool getFrameCounterStatus();
~HandleRtsp();
signals:
// Emitted for every Nth captured frame (N = frameCounter).
void frameReady(const cv::Mat & frame,const QString &url);
// Emitted when the stream cannot be opened or a read fails.
void streamInterrupted(const QString &url);
private:
QString respUrl;
volatile bool stopFlag;       // set by the destructor to end run()
volatile bool isFrameCounter; // NOTE(review): never assigned in the visible code
QSemaphore & semaphore;
std::map<int,cv::Mat>maps;
QAtomicInteger<int> frameCounter; // sampling interval shared across threads
};
#endif // HANDLERTSP_H
#ifndef HTTPCLIENT_H
#define HTTPCLIENT_H
#include "Common.h"
#include <QHttpPart>
#include <QSsl>
#include <QObject>
#include <QNetworkAccessManager>
#include <QNetworkReply>
#include <QEventLoop>
#include <QFile>
#include <QTimer>
#include <QTextCodec>
#include <QDir>
#include <QRandomGenerator>
#include <QSslConfiguration>
// Synchronous HTTP helper built on QNetworkAccessManager: blocking GET/POST
// with timeout + retry, plus file download and OSS upload.
class HttpClient : public QObject
{
Q_OBJECT
public:
explicit HttpClient(QObject *parent = nullptr);
~HttpClient();
// Blocking GET; returns true on QNetworkReply::NoError.
bool get(QNetworkRequest &request);
// Blocking POST of `data`; returns true on QNetworkReply::NoError.
bool post(QNetworkRequest &request, const QByteArray &data);
// Downloads into filePath + extension; on success fullPathName holds the saved path.
bool downloadFile(QNetworkRequest request, const QString& filePath,
QString &fullPathName,QString &extension);
// Uploads filePath to an OSS bucket using a signed PUT request.
bool uploadFile(QNetworkRequest request,const QString& accessKeyId,
const QString& accessKeySecret,const QString& filePath,
QString & bucketName,QString &securityToken);
QString errorCode() const;
QString errorString() const;
QString text() const;
QString FileName() const;
void setFileName(QString fileName);
private:
// Runs the local event loop until the reply finishes or the timer fires; frees the reply.
void processReply(QNetworkReply *reply);
private:
QNetworkAccessManager *m_networkAccessManager = nullptr;
QNetworkReply::NetworkError m_error;
QString m_text; // text body of the last response
QString m_errorString; // last error message
int m_networkErrorRetry = 0; // additional retries on network error
QEventLoop m_eventLoop;
QTimer *m_timer = nullptr; // 4 s single-shot timeout for processReply
QByteArray m_data; // raw body of the last response
};
#endif // HTTPCLIENT_H
This diff is collapsed. Click to expand it.
#ifndef HTTPSERVICE_H
#define HTTPSERVICE_H
#include "LogHandle.h"
#include "HttpClient.h"
#include "VidesData.h"
#include "Common.h"
#include <list>
#include <QJsonObject>
#include <QJsonArray>
#include <QJsonDocument>
#include <QUrlQuery>
#include <QMutex>
#include <QMutexLocker>
const QString OPERATION_FAILED="操作失败";
const QString OPERATION_SUCCESS="操作成功";
// REST client for the backend: device status, recognition results, camera
// configuration and OSS upload/download endpoints. Thread-safety: access to
// the shared HttpClient is serialized via m_httpClientMutex.
class HttpService{
public:
HttpService();
HttpService(QString httpUrl);
// Report a single successful door-exit event.
vides_data::response* httpPostDeviceStatus(vides_data::requestDeviceStatus & deviceStatus);
vides_data::response* httpPostRecord(int id,int recongnition_type,QString sn,QString videw_addr);
vides_data::response *httpFindCameras(QString &serialNumber,vides_data::responseDeviceData&datas);
// Push license-plate recognition results.
vides_data::response *httpLicensePlateRecognition(vides_data::requestLicensePlate &licensePlateRecognition,
std::list<vides_data::responseRecognitionData>&result
);
// Fetch the face list for a device.
vides_data::response *httpFindFaceReconition(QString &serialNumber,std::list<vides_data::responseFaceReconition*>&datas);
// Push a face-recognition result.
vides_data::response *httpPostFaceReconition(vides_data::requestFaceReconition & faceReconition);
// Push a people-count change.
vides_data::response *httpPostFacePopulation(QByteArray &img,int &number,QString sn,qint64 time);
// Fetch the client stream-group list.
vides_data::response *httpFindStream(QString &serialNumber);
vides_data::response *httpFindGb28181Config(QString &serialNumber);
vides_data::response *httpDownload( const QString &filePath,QString &fullPathName);
vides_data::response *httpUploadFile(const QString &filePath,QString& accessKeyId,QString& accessKeySecret,
QString & bucketName,QString &securityToken);
void setHttpUrl(const QString & httpUrl);
static vides_data::responseStsCredentials stsCredentials;
~HttpService();
private:
QString httpUrl;
HttpClient m_httpClient;
QMutex m_httpClientMutex; // guards m_httpClient across calling threads
};
#endif // HTTPSERVICE_H
#include "HttpClient.h"
/// Builds the client: a 4-second single-shot timeout timer that aborts the
/// blocking event loop in processReply(), and the network access manager.
/// Both children are owned (and released) by this QObject parent.
HttpClient::HttpClient(QObject *parent)
    : QObject(parent)
{
    m_timer = new QTimer(this);
    m_timer->setSingleShot(true);
    m_timer->setInterval(4000);
    m_networkAccessManager = new QNetworkAccessManager(this);
    connect(m_timer, &QTimer::timeout, &m_eventLoop, &QEventLoop::quit);
}
// Destructor. The explicit deletes are redundant (both objects are QObject
// children of this client and would be freed by ~QObject) but harmless:
// deleting a child removes it from the parent's child list, so there is no
// double free.
HttpClient::~HttpClient()
{
delete m_networkAccessManager;
delete m_timer;
}
/**
 * Blocking GET with retry.
 * Issues the request up to m_networkErrorRetry + 1 times; each attempt blocks
 * in processReply() until the reply finishes or the 4 s timer fires.
 * On success the response body is available via text() / the m_data member.
 *
 * Fix: removed the dead locals `url` / `urlString` (computed but never used)
 * and the stale comment that referred to them.
 *
 * @return true when an attempt completed with QNetworkReply::NoError.
 */
bool HttpClient::get(QNetworkRequest &request)
{
    bool success = false;
    request.setPriority(QNetworkRequest::HighPriority);
    for (int i = 0; i < m_networkErrorRetry + 1; i++)
    {
        QNetworkReply *reply = m_networkAccessManager->get(request);
        processReply(reply); // blocks; sets m_error/m_text and frees the reply
        if (m_error == QNetworkReply::NoError)
        {
            success = true;
            break;
        }
    }
    return success;
}
/**
 * Blocking POST with retry: sends `data`, waiting in processReply() for each
 * attempt, and retries up to m_networkErrorRetry additional times on error.
 * @return true when an attempt completed with QNetworkReply::NoError.
 */
bool HttpClient::post(QNetworkRequest &request, const QByteArray &data)
{
    request.setPriority(QNetworkRequest::HighPriority);
    const int attempts = m_networkErrorRetry + 1;
    for (int attempt = 0; attempt < attempts; ++attempt)
    {
        QNetworkReply *reply = m_networkAccessManager->post(request, data);
        processReply(reply); // blocks; sets m_error and frees the reply
        if (m_error == QNetworkReply::NoError)
            return true;
    }
    return false;
}
/**
 * Downloads the request's URL into `filePath + extension`, creating the
 * directory if needed. TLS verification is disabled (self-signed endpoints).
 * On return, fullPathName holds the native path that was written.
 *
 * Fixes: removed the unused Common reference; abort the in-flight reply when
 * the local file cannot be opened (previously the transfer kept running);
 * close the file before removing it on a failed download.
 *
 * @return true when the transfer finished without a network error.
 */
bool HttpClient::downloadFile(QNetworkRequest request, const QString &filePath,
                              QString &fullPathName,QString &extension)
{
    bool success = false;
    QDir dir(filePath);
    if (!dir.exists()) {
        dir.mkpath(".");
    }
    QString filePa = QDir::toNativeSeparators(filePath +extension);
    // Accept any protocol / certificate — server side uses self-signed TLS.
    QSslConfiguration config= QSslConfiguration::defaultConfiguration();
    config.setProtocol(QSsl::AnyProtocol);
    config.setPeerVerifyMode(QSslSocket::VerifyNone);
    request.setSslConfiguration(config);
    QNetworkReply* reply = m_networkAccessManager->get(request);
    reply->ignoreSslErrors();
    QEventLoop loop;
    QObject::connect(reply, &QNetworkReply::finished, &loop, &QEventLoop::quit);
    QFile file(filePa);
    if (!file.open(QIODevice::WriteOnly)) {
        qDebug() << "Failed to open file for writing";
        reply->abort();       // stop the transfer we can no longer store
        reply->deleteLater(); // release the reply resources
        return false;
    }
    // Stream chunks to disk as they arrive instead of buffering everything.
    QObject::connect(reply, &QNetworkReply::readyRead, [&]() {
        file.write(reply->readAll());
    });
    QObject::connect(reply, &QNetworkReply::finished, [&, reply]() {
        if (reply->error() != QNetworkReply::NoError) {
            qDebug() << "Download failed:" << reply->errorString();
            file.close();
            file.remove(); // drop the partial file
        } else {
            success = true;
            file.flush();
            file.close();
        }
        reply->deleteLater();
    });
    loop.exec();
    fullPathName = filePa;
    return success;
}
/**
 * Uploads `filePath` to an Aliyun OSS bucket with a signed PUT request
 * (Date / Authorization / X-Oss-Security-Token headers), blocking until the
 * transfer completes. On success the local file is removed.
 *
 * Fix: the heap-allocated QFile was leaked (never closed nor deleted) on both
 * early-return paths; it is now released there as well.
 *
 * @return true when the PUT completed without a network error.
 */
bool HttpClient::uploadFile(QNetworkRequest request,const QString& accessKeyId,
                            const QString& accessKeySecret,
                            const QString &filePath,QString & bucketName,QString &securityToken){
    bool success = false;
    QFile *file = new QFile(filePath, this);
    qDebug() << "filePath"<<filePath;
    if (!file->open(QIODevice::ReadOnly)) {
        qDebug() << "uploadFile Failed to open file for reading";
        file->deleteLater(); // fix: do not leak the QFile
        return false;
    }
    QByteArray fileData = file->readAll();
    if(fileData.isEmpty()){
        qDebug() << "uploadFile = file.readAll()";
        file->close();
        file->deleteLater(); // fix: do not leak the QFile
        return false;
    }
    QFileInfo fileInfo(file->fileName());
    QString fileName = fileInfo.fileName(); // file name including extension
    // Accept any protocol / certificate for the OSS endpoint.
    QSslConfiguration config = QSslConfiguration::defaultConfiguration();
    config.setProtocol(QSsl::AnyProtocol);
    config.setPeerVerifyMode(QSslSocket::VerifyNone);
    request.setSslConfiguration(config);
    Common & instace= Common::getInstance();
    // OSS signatures require an RFC-1123 GMT date, so force an English locale.
    QDateTime now = QDateTime::currentDateTimeUtc();
    QLocale englishLocale(QLocale::English, QLocale::UnitedStates);
    QString gmtDateString = englishLocale.toString(now, "ddd, dd MMM yyyy HH:mm:ss 'GMT'");
    QString oSSHeaders="x-oss-security-token:";
    oSSHeaders.append(securityToken);
    QString signature= instace.generateSignature(accessKeySecret,"PUT","","video/mp4",gmtDateString,oSSHeaders,"/"+bucketName+"/"+fileName);
    request.setRawHeader("Date", gmtDateString.toUtf8());
    QString authHeaderValue = "OSS " + accessKeyId + ":" + signature;
    request.setRawHeader("Authorization", authHeaderValue.toUtf8());
    request.setRawHeader("X-Oss-Security-Token",securityToken.toUtf8());
    request.setHeader(QNetworkRequest::ContentTypeHeader, "video/mp4");
    request.setHeader(QNetworkRequest::ContentLengthHeader, QString::number(fileData.size()).toUtf8());
    QNetworkReply *reply = m_networkAccessManager->put(request,fileData);
    reply->ignoreSslErrors();
    QEventLoop loop;
    connect(reply, &QNetworkReply::finished, this, [reply, file,&loop, &success]() {
        if (reply->error() == QNetworkReply::NoError) {
            qDebug() << "Upload successful!";
            file->remove(); // uploaded OK — delete the local copy
            success=true;
        } else {
            qDebug() << "Upload failed:" << reply->errorString();
        }
        file->close();
        file->deleteLater();
        reply->deleteLater();
        loop.quit();
    });
    // Block until the transfer finishes.
    loop.exec();
    return success;
}
/**
 * Blocks on the member event loop until `reply` finishes or the 4 s timer
 * fires, then captures m_error / m_errorString / m_data / m_text and frees
 * the reply.
 *
 * Fix: on the timeout path m_error was set to TimeoutError but m_errorString
 * kept the previous request's message, so errorString() reported stale text.
 */
void HttpClient::processReply(QNetworkReply *reply)
{
    connect(reply, SIGNAL(finished()), &m_eventLoop, SLOT(quit()));
    m_text.clear();
    m_timer->start();
    m_eventLoop.exec(); // woken by finished() or by the timeout timer
    if (m_timer->isActive())
    {
        // The reply finished before the timer fired.
        m_timer->stop();
        m_error = reply->error();
        m_errorString = reply->errorString();
        if (reply->bytesAvailable() > 0)
        {
            m_data = reply->readAll();
            // Decode using the charset hinted in the body, defaulting to UTF-8.
            QTextCodec *codec = QTextCodec::codecForHtml(m_data, QTextCodec::codecForName("utf-8"));
            if (codec)
                m_text = codec->toUnicode(m_data);
        }
        else
        {
            m_data.clear();
            m_text.clear();
        }
    }
    else
    {
        // Timer fired first: abandon the request.
        reply->abort();
        m_error = QNetworkReply::TimeoutError;
        m_errorString = QStringLiteral("request timed out"); // keep errorString() consistent
    }
    delete reply;
}
/// Human-readable message for the most recent request's error.
QString HttpClient::errorString() const
{
    return m_errorString;
}
// Maps the last request's QNetworkReply::NetworkError to its enumerator name.
// Returns "UnknownError" for any value not covered by the switch.
QString HttpClient::errorCode() const
{
switch (m_error)
{
case QNetworkReply::NoError: return "NoError";
case QNetworkReply::ConnectionRefusedError: return "ConnectionRefusedError";
case QNetworkReply::RemoteHostClosedError: return "RemoteHostClosedError";
case QNetworkReply::HostNotFoundError: return "HostNotFoundError";
case QNetworkReply::TimeoutError: return "TimeoutError";
case QNetworkReply::OperationCanceledError: return "OperationCanceledError";
case QNetworkReply::SslHandshakeFailedError: return "SslHandshakeFailedError";
case QNetworkReply::TemporaryNetworkFailureError: return "TemporaryNetworkFailureError";
case QNetworkReply::ProxyConnectionRefusedError: return "ProxyConnectionRefusedError";
case QNetworkReply::ProxyConnectionClosedError: return "ProxyConnectionClosedError";
case QNetworkReply::ProxyNotFoundError: return "ProxyNotFoundError";
case QNetworkReply::ProxyTimeoutError: return "ProxyTimeoutError";
case QNetworkReply::ProxyAuthenticationRequiredError: return "ProxyAuthenticationRequiredError";
case QNetworkReply::ContentAccessDenied: return "ContentAccessDenied";
case QNetworkReply::ContentOperationNotPermittedError: return "ContentOperationNotPermittedError";
case QNetworkReply::ContentNotFoundError: return "ContentNotFoundError";
case QNetworkReply::AuthenticationRequiredError: return "AuthenticationRequiredError";
case QNetworkReply::ContentReSendError: return "ContentReSendError";
case QNetworkReply::ProtocolUnknownError: return "ProtocolUnknownError";
case QNetworkReply::ProtocolInvalidOperationError: return "ProtocolInvalidOperationError";
case QNetworkReply::UnknownNetworkError: return "UnknownNetworkError";
case QNetworkReply::UnknownProxyError: return "UnknownProxyError";
case QNetworkReply::UnknownContentError: return "UnknownContentError";
case QNetworkReply::ProtocolFailure: return "ProtocolFailure";
}
return "UnknownError";
}
/// Decoded text body of the most recent response (set by processReply()).
QString HttpClient::text() const { return m_text; }
#include "HumanDetection.h"
HumanDetection* HumanDetection::instance = nullptr;
// Trivial construction/destruction: all detector state is owned by the caller
// (the TCV_HumanDetector passed to findHuManCar), not by this class.
HumanDetection::HumanDetection(){
}
HumanDetection::~HumanDetection(){
}
// Runs one frame of detection on a BGR cv::Mat.
// res == 0 -> return the detected person count; any other value -> returns 1.
// NOTE(review): the car branch is hard-coded to 1 — the real call
// (TCV_HumanDetectorGetNumOfCar) is commented out. Confirm whether this is a
// deliberate stub before relying on the car count.
int HumanDetection::findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector *detector){
TCV_CameraStream *stream = TCV_CreateCameraStream();
TCV_CameraStreamSetData(stream, source.data, source.cols, source.rows);
TCV_CameraStreamSetRotationMode(stream, TCV_CAMERA_ROTATION_0);
TCV_CameraStreamSetStreamFormat(stream, TCV_STREAM_BGR);
// 0 = person, 1 = car
// Run detection for this single frame.
TCV_HumanDetectorProcessFrame(detector, stream);
int num = (res == 0) ? TCV_HumanDetectorGetNumOfHuman(detector) : 1;//TCV_HumanDetectorGetNumOfCar(detector);
qDebug() << (res == 0 ? "Number of people detected:" : "Number of cars detected:") << num;
TCV_ReleaseCameraStream(stream); // stream is per-call; detector is caller-owned
return num;
}
#ifndef HUMANDETECTION_H
#define HUMANDETECTION_H
#include "so_human_sdk.h"
#include <opencv2/opencv.hpp>
#include <QDebug>
// Thin wrapper around the so_human_sdk person/car detector.
// NOTE(review): the static `instance` pointer is redundant — getInstance()
// already uses a function-local (Meyers) singleton; the pointer is never the
// object returned. initDetector() is declared but not defined in this chunk.
class HumanDetection
{
public:
HumanDetection();
~HumanDetection();
void initDetector();
// Counts people (res == 0) or cars in one frame; see the .cpp for caveats.
int findHuManCar(const cv::Mat &source,int res,TCV_HumanDetector *detector);
static HumanDetection& getInstance()
{
static HumanDetection instance;
return instance;
}
private:
static HumanDetection* instance;
};
#endif // HUMANDETECTION_H
#include "Common.h"
#include "LicensePlateRecognition.h"
// Trivial construction/destruction: the recognition context is created and
// released per call (or owned by the caller), not held by this singleton.
LicensePlateRecognition::LicensePlateRecognition() {}
LicensePlateRecognition::~LicensePlateRecognition(){
}
LicensePlateRecognition* LicensePlateRecognition::instance = nullptr;
//void LicensePlateRecognition::initHlprContext(const QString &modelPaths, const QString &carCascade, float carConfidence){
// HLPR_ContextConfiguration configuration = {0};
// QByteArray && by_mpath=modelPaths.toUtf8();
// char* m_path=by_mpath.data();
// configuration.models_path = m_path;
// configuration.max_num = 5;
// configuration.det_level = DETECT_LEVEL_LOW;
// configuration.use_half = false;
// configuration.nms_threshold = 0.5f;
// configuration.rec_confidence_threshold = carConfidence;
// configuration.box_conf_threshold = 0.30f;
// configuration.threads = 1;
// this->carCascadeUrl=carCascade;
// ctx = HLPR_CreateContext(&configuration);
//}
/// Legacy one-shot recognition: builds a fresh HLPR context from modelPaths,
/// runs plate recognition on a BGR frame and joins the formatted results
/// (one line per plate) into lpNumber.
///
/// Fixes:
///  - the DataBuffer was leaked when context creation failed (early return
///    skipped HLPR_ReleaseDataBuffer);
///  - QString::arg(double) was called with format char 'Q', which is not a
///    valid format (e/E/f/g/G) and triggers a Qt warning; changed to 'f'.
void LicensePlateRecognition::oldLicensePlateNumber(const cv::Mat &source,const QString &modelPaths,QString & lpNumber){
    HLPR_ImageData data = {0};
    data.data = source.data;
    data.width = source.cols;
    data.height = source.rows;
    data.format = STREAM_BGR;
    data.rotation = CAMERA_ROTATION_0;
    // create DataBuffer
    P_HLPR_DataBuffer buffer = HLPR_CreateDataBuffer(&data);
    // Sanity-check the stream; a crash or odd image here means the input
    // format does not match what the SDK expects.
    HLPR_DataBufferTest(buffer, "test_buffer.jpg");
    // create context
    HLPR_ContextConfiguration configuration = {0};
    QByteArray && by_mpath=modelPaths.toUtf8();
    char* m_path=by_mpath.data();
    configuration.models_path = m_path;
    configuration.max_num = 5;
    configuration.det_level = DETECT_LEVEL_LOW;
    configuration.use_half = false;
    configuration.nms_threshold = 0.5f;
    configuration.rec_confidence_threshold = 0.8f;
    configuration.box_conf_threshold = 0.30f;
    configuration.threads = 1;
    P_HLPR_Context ctx1 = HLPR_CreateContext(&configuration);
    HREESULT ret = HLPR_ContextQueryStatus(ctx1);
    if (ret != HResultCode::Ok) {
        printf("create error.\n");
        HLPR_ReleaseDataBuffer(buffer); // fix: don't leak the buffer
        return ;
    }
    // exec plate recognition
    HLPR_PlateResultList results = {0};
    HLPR_ContextUpdateStream(ctx1, buffer, &results);
    QStringList lpResults;
    for (int i = 0; i < results.plate_size; ++i) {
        std::string type;
        // NOTE(review): assumes SDK type indices stay within the `types` table.
        if (results.plates[i].type == HLPR_PlateType::PLATE_TYPE_UNKNOWN) {
            type = "未知";
        } else {
            type = types[results.plates[i].type];
        }
        qDebug()<<QString("车牌号:%1").arg(results.plates[i].code);
        QString plateResult = QString("第%1个,%2,车牌号:%3,置信度:%4,左上角点x坐标:%5,左上角点y坐标:%6,右下角点x坐标:%7,右下角点y坐标:%8")
            .arg(i + 1).arg(type.c_str()).arg(results.plates[i].code)
            .arg(results.plates[i].text_confidence).arg(results.plates[i].x1, 0, 'f', 4)
            .arg(results.plates[i].y1, 0, 'f', 4).arg(results.plates[i].x2, 0, 'f', 4)
            .arg(results.plates[i].y2, 0, 'f', 4);
        lpResults.append(plateResult);
    }
    lpNumber =lpResults.join("\n");
    // release buffer
    HLPR_ReleaseDataBuffer(buffer);
    // release context
    HLPR_ReleaseContext(ctx1);
}
/// Runs plate recognition on a BGR frame using a caller-owned context `ctx`.
/// For each detected plate, appends a vides_data::LicensePlate (with the
/// bounding quad derived from the SDK's x1/y1/x2/y2 box) to plate.plates and
/// a formatted description to lpNumber (tab-joined).
///
/// Fixes:
///  - the DataBuffer was leaked when the context status check failed (early
///    return skipped HLPR_ReleaseDataBuffer);
///  - QString::arg(double) was called with invalid format char 'Q' (valid:
///    e/E/f/g/G); changed to 'f'.
void LicensePlateRecognition::licensePlateNumber(const cv::Mat &source, QString &lpNumber,vides_data::requestLicensePlate &plate,
                                                 qint64 currentTime,P_HLPR_Context ctx) {
    // Wrap the frame for the SDK (no copy: data points into `source`).
    HLPR_ImageData data = {0};
    data.data = source.data;
    data.width = source.cols;
    data.height = source.rows;
    data.format = STREAM_BGR;
    data.rotation = CAMERA_ROTATION_0;
    // create DataBuffer
    P_HLPR_DataBuffer buffer = HLPR_CreateDataBuffer(&data);
    HREESULT ret = HLPR_ContextQueryStatus(ctx);
    if (ret != HResultCode::Ok) {
        qInfo()<<QString("create error");
        HLPR_ReleaseDataBuffer(buffer); // fix: don't leak the buffer
        return ;
    }
    // exec plate recognition
    HLPR_PlateResultList results = {0};
    HLPR_ContextUpdateStream(ctx, buffer, &results);
    if (results.plate_size <= 0) {
        HLPR_ReleaseDataBuffer(buffer);
        return;
    }
    QStringList lpResults;
    for (int i = 0; i < results.plate_size; ++i) {
        std::string type;
        if (results.plates[i].type == HLPR_PlateType::PLATE_TYPE_UNKNOWN) {
            type = "未知";
        } else {
            type = types[results.plates[i].type];
        }
        vides_data::LicensePlate newPlate;
        newPlate.time=currentTime;
        newPlate.new_color=QString::fromStdString(type);
        newPlate.new_plate=QString::fromUtf8(results.plates[i].code);
        // Build the quad from the axis-aligned box (x1,y1)-(x2,y2).
        vides_data::ParkingArea area;
        area.topLeftCornerX=results.plates[i].x1;
        area.topLeftCornerY=results.plates[i].y1;
        area.bottomLeftCornerX=results.plates[i].x1;
        area.bottomLeftCornerY=results.plates[i].y2;
        area.topRightCornerX=results.plates[i].x2;
        area.topRightCornerY=results.plates[i].y1;
        area.bottomRightCornerX=results.plates[i].x2;
        area.bottomRightCornerY=results.plates[i].y2;
        newPlate.recognition=area;
        QString plateResult = QString("第%1个,%2,车牌号:%3,置信度:%4,左上角点x坐标:%5,左上角点y坐标:%6,右下角点x坐标:%7,右下角点y坐标:%8")
            .arg(i + 1).arg(type.c_str()).arg(results.plates[i].code)
            .arg(results.plates[i].text_confidence).arg(results.plates[i].x1, 0, 'f', 4)
            .arg(results.plates[i].y1, 0, 'f', 4).arg(results.plates[i].x2, 0, 'f', 4)
            .arg(results.plates[i].y2, 0, 'f', 4);
        plate.plates.push_front(newPlate);
        lpResults.append(plateResult);
    }
    lpNumber =lpResults.join("\t");
    HLPR_ReleaseDataBuffer(buffer);
}
#ifndef LICENSEPLATERECOGNITION_H
#define LICENSEPLATERECOGNITION_H
#include "hyper_lpr_sdk.h"
#include "LogHandle.h"
#include "VidesData.h"
#include <QString>
#include <opencv2/opencv.hpp>
#include <QTextStream>
#include <QFile>
#include <QImage>
#include <mutex>
const std::vector<std::string> types =
{"蓝牌", "黄牌单层", "白牌单层", "绿牌新能源", "黑牌港澳",
"香港单层", "香港双层", "澳门单层", "澳门双层", "黄牌双层"};
// Singleton facade over the hyper_lpr SDK.
// NOTE(review): the static `instance` pointer duplicates the Meyers singleton
// in getInstance() and is never the object actually returned.
class LicensePlateRecognition{
public:
static LicensePlateRecognition& getInstance()
{
static LicensePlateRecognition instance;
return instance;
}
// Recognize plate numbers using a caller-owned context.
void licensePlateNumber(const cv::Mat &source,QString & lpNumber, vides_data::requestLicensePlate &plate,
qint64 currentTime,P_HLPR_Context ctx);
// Legacy path: builds and tears down a context per call.
void oldLicensePlateNumber(const cv::Mat &source,const QString &modelPaths,QString & lpNumber);
// void initHlprContext(const QString &modelPaths,const QString &carCascade,float carConfidence);
private:
static LicensePlateRecognition* instance;
//P_HLPR_Context ctx ;
float carConfidence;
std::mutex carMutex;
LicensePlateRecognition();
~LicensePlateRecognition();
};
#endif // LICENSEPLATERECOGNITION_H
#ifndef LOGHANDLER_H
#define LOGHANDLER_H
#include <iostream>
#include <QDebug>
#include <QDateTime>
#include <QMutexLocker>
#include <QDir>
#include <QFile>
#include <QFileInfo>
#include <QTimer>
#include <QTextStream>
#include <QTextCodec>
const int g_logLimitSize = 5;
// Internals of the file logger: owns the timers and (via statics shared with
// the message handler) the log file and its stream.
struct LogHandlerPrivate {
LogHandlerPrivate();
~LogHandlerPrivate();
// Open today.log; if it was created on an earlier day, rename it to yyyy-MM-dd.log and recreate today.log.
void openAndBackupLogFile();
void checkLogFiles(); // roll the log when it grows too large
void autoDeleteLog(); // delete old daily logs
// Qt message handler: writes every qDebug/qInfo/... message to the log file.
static void messageHandler(QtMsgType type, const QMessageLogContext &context, const QString &msg);
QDir logDir; // log directory
QTimer renameLogFileTimer; // periodic check for day rollover / size
QTimer flushLogFileTimer; // periodic flush so the file stays current
QDate logFileCreatedDate; // creation date of the current log file
static QFile *logFile; // the log file (static: shared with messageHandler)
static QTextStream *logOut; // stream over logFile (static to avoid per-call setup)
static QMutex logMutex; // guards logFile/logOut across threads
};
// Public facade: installs/uninstalls the Qt message handler that routes
// qDebug/qInfo/... output to the rolling log file. Singleton via Get().
class LogHandler {
public:
void installMessageHandler(); // install the custom Qt message handler
void uninstallMessageHandler(); // restore the default handler and free resources
static LogHandler& Get() {
static LogHandler m_logHandler;
return m_logHandler;
}
private:
LogHandler();
LogHandlerPrivate *d; // lazily created by installMessageHandler()
};
#endif // LOGHANDLER_H
#include "LogHandle.h"
#include <qthread.h>
/************************************************************************************************************
* *
* LogHandlerPrivate *
* *
***********************************************************************************************************/
// 初始化 static 变量
QMutex LogHandlerPrivate::logMutex;
QFile* LogHandlerPrivate::logFile = nullptr;
QTextStream* LogHandlerPrivate::logOut = nullptr;
LogHandlerPrivate::LogHandlerPrivate() {
logDir.setPath("log"); // TODO: log directory is `log` next to the exe; could come from config
QString logPath = logDir.absoluteFilePath("today.log"); // path of the active log file
// ======== Determine when the log file was created ========
// QFileInfo::created(): On most Unix systems, this function returns the time of the last status change,
// so it cannot be polled at runtime; capture the last-modified date once at startup instead.
logFileCreatedDate = QFileInfo(logPath).lastModified().date(); // null date if the file does not exist
// Open the log file; if it was created on an earlier day, archive it first.
openAndBackupLogFile();
// Periodically re-check rollover and size.
// NOTE(review): the original comment said "every ten minutes" but the
// interval is 2 seconds — confirm which is intended.
renameLogFileTimer.setInterval(1000*2); // TODO: could come from config
renameLogFileTimer.start();
QObject::connect(&renameLogFileTimer, &QTimer::timeout, [this] {
QMutexLocker locker(&LogHandlerPrivate::logMutex);
openAndBackupLogFile(); // reopen / day rollover
checkLogFiles(); // size-based rollover
// autoDeleteLog(); // prune old logs (disabled)
});
// Flush periodically so recent messages are visible in the file quickly.
flushLogFileTimer.setInterval(1000); // TODO: could come from config
flushLogFileTimer.start();
QObject::connect(&flushLogFileTimer, &QTimer::timeout, [] {
// qDebug() << QDateTime::currentDateTime().toString("yyyy-MM-dd hh:mm:ss"); // test: write continuously
QMutexLocker locker(&LogHandlerPrivate::logMutex);
if (nullptr != logOut) {
logOut->flush();
}
});
}
// Flush and close the log file, then null the statics so a later
// reinstall starts from a clean state.
LogHandlerPrivate::~LogHandlerPrivate() {
if (nullptr != logFile) {
logFile->flush();
logFile->close();
delete logOut;
delete logFile;
// They are static, so reset them explicitly.
logOut = nullptr;
logFile = nullptr;
}
}
// Open today.log; if it was not created today, rename it to yyyy-MM-dd.log and recreate today.log.
void LogHandlerPrivate::openAndBackupLogFile() {
// Overall logic:
// 1. At startup logFile is nullptr: open (possibly existing) today.log in Append mode.
// 2. If logFileCreatedDate is null the file did not exist yet, so record today as its creation date.
// 3. While running, if the file's creation date is no longer today, rename it
//    using that date and create a fresh today.log.
// 4. (Old-file pruning is handled elsewhere.)
// Note: today.log is always the current day's file; on day rollover step 3 archives it.
// Create the log directory if it does not exist.
if (!logDir.exists()) {
logDir.mkpath("."); // creates intermediate directories as needed
}
QString logPath = logDir.absoluteFilePath("today.log"); // path of the active file
// [[1]] logFile is nullptr on every program start
if (logFile == nullptr) {
logFile = new QFile(logPath);
logOut = (logFile->open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Append)) ? new QTextStream(logFile) : nullptr;
if (logOut != nullptr)
logOut->setCodec("UTF-8");
// [[2]] First creation: the recorded date is invalid, set it to today.
if (logFileCreatedDate.isNull()) {
logFileCreatedDate = QDate::currentDate();
}
}
// [[3]] Day rollover: archive under the creation date, reopen a fresh file.
if (logFileCreatedDate != QDate::currentDate()) {
logFile->flush();
logFile->close();
delete logOut;
delete logFile;
QString newLogPath = logDir.absoluteFilePath(logFileCreatedDate.toString("yyyy-MM-dd.log"));
QFile::rename(logPath, newLogPath); // archive the old file under its creation date
logFile = new QFile(logPath);
logOut = (logFile->open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Append)) ? new QTextStream(logFile) : nullptr;
logFileCreatedDate = QDate::currentDate();
if (logOut != nullptr)
logOut->setCodec("UTF-8");
}
}
// Size-based rollover for the active log file.
// Fixes:
//  - the threshold was 1024*g_logLimitSize bytes (5 KB) although the comment
//    promised 5 MB; now g_logLimitSize is interpreted as megabytes;
//  - the archive name was yyyy-MM-dd.log, which collides with the daily
//    rollover target (QFile::rename fails if the destination exists, silently
//    losing the roll); per the original comment the archive now carries a
//    timestamp, yyyy-MM-dd_hhmmss.log, which is unique per roll.
void LogHandlerPrivate::checkLogFiles() {
    // Roll when today.log exceeds g_logLimitSize MB; archive as yyyy-MM-dd_hhmmss.log.
    if (logFile->size() > 1024*1024*g_logLimitSize) {
        logFile->flush();
        logFile->close();
        delete logOut;
        delete logFile;
        QString logPath = logDir.absoluteFilePath("today.log"); // active log path
        QString newLogPath = logDir.absoluteFilePath(
            QDateTime::currentDateTime().toString("yyyy-MM-dd_hhmmss") + ".log");
        QFile::rename(logPath, newLogPath);
        logFile = new QFile(logPath);
        logOut = (logFile->open(QIODevice::WriteOnly | QIODevice::Text | QIODevice::Append)) ? new QTextStream(logFile) : nullptr;
        logFileCreatedDate = QDate::currentDate();
        if (logOut != nullptr)
            logOut->setCodec("UTF-8");
    }
}
// Delete old daily log files.
// NOTE(review): the surrounding comments say "30 days" but the code uses
// addDays(-15) — logs older than 15 days are deleted. Confirm which is intended.
void LogHandlerPrivate::autoDeleteLog()
{
QDateTime now = QDateTime::currentDateTime();
// Cutoff: anything with a date before this is removed.
QDateTime dateTime1 = now.addDays(-15);
QDateTime dateTime2;
QString logPath = logDir.absoluteFilePath(""); // log directory path
QDir dir(logPath);
QStringList filename ;
filename << "*.log";// filters can be stacked; wildcards allowed
QFileInfoList fileList = dir.entryInfoList(filename);
foreach (QFileInfo f, fileList) {
// skip "." / ".." and the active today.log
if (f.baseName() == "" || f.baseName()=="today" )
continue;
// Archived files are named yyyy-MM-dd.log, so the base name parses as a date.
dateTime2 = QDateTime::fromString(f.baseName(), "yyyy-MM-dd");
if (dateTime2 < dateTime1) { // older than the cutoff -> delete
dir.remove(f.absoluteFilePath());
}
}
}
// Qt message handler: writes each message to stdout and (when available) the
// log file as "time - [LEVEL] (file:line, function): message".
// Fixes:
//  - std::string(localMsg) relied on QByteArray's deprecated implicit
//    conversion to const char* (breaks with QT_NO_CAST_FROM_BYTEARRAY);
//    use constData() explicitly;
//  - basename extraction used QDir::separator(), which is '\\' on Windows
//    while __FILE__ paths typically contain '/'; search for both separators.
void LogHandlerPrivate::messageHandler(QtMsgType type, const QMessageLogContext &context, const QString &msg) {
    QMutexLocker locker(&LogHandlerPrivate::logMutex);
    QString level;
    switch (type) {
    case QtDebugMsg:
        level = "DEBUG";
        break;
    case QtInfoMsg:
        level = "INFO ";
        break;
    case QtWarningMsg:
        level = "WARN ";
        break;
    case QtCriticalMsg:
        level = "ERROR";
        break;
    case QtFatalMsg:
        level = "FATAL";
        break;
    default:
        break;
    }
    // Console output: on Windows std::cout expects GB2312 while msg is UTF-16/UTF-8.
#if defined(Q_OS_WIN)
    QByteArray localMsg = QTextCodec::codecForName("GB2312")->fromUnicode(msg); //msg.toLocal8Bit();
#else
    QByteArray localMsg = msg.toLocal8Bit();
#endif
    std::cout << localMsg.constData() << std::endl;
    if (nullptr == LogHandlerPrivate::logOut) {
        return; // file logging not (yet) available
    }
    // File output, format: time - [Level] (file:line, function): message
    QString fileName = context.file;
    // __FILE__ may use either separator depending on compiler/platform.
    int index = qMax(fileName.lastIndexOf(QLatin1Char('/')),
                     fileName.lastIndexOf(QLatin1Char('\\')));
    fileName = fileName.mid(index + 1);
    (*LogHandlerPrivate::logOut) << QString("%1 - [%2] (%3:%4, %5): %6\n")
        .arg(QDateTime::currentDateTime().toString("yyyy-MM-dd hh:mm:ss")).arg(level)
        .arg(fileName).arg(context.line).arg(context.function).arg(msg);
}
/************************************************************************************************************
* *
* LogHandler *
* *
***********************************************************************************************************/
// Private constructor (singleton); the private impl is created lazily by installMessageHandler().
LogHandler::LogHandler() : d(nullptr) {
}
// Install the custom Qt message handler (idempotent: only the first call creates d).
void LogHandler::installMessageHandler() {
QMutexLocker locker(&LogHandlerPrivate::logMutex); // RAII lock, released on scope exit
if (nullptr == d) {
d = new LogHandlerPrivate();
qInstallMessageHandler(LogHandlerPrivate::messageHandler); // route Qt messages to our handler
}
}
// Restore the default Qt message handler and release the logging resources.
void LogHandler::uninstallMessageHandler() {
QMutexLocker locker(&LogHandlerPrivate::logMutex);
qInstallMessageHandler(nullptr); // back to the default handler
delete d; // ~LogHandlerPrivate flushes and closes the file
d = nullptr;
}
#include "MediaFaceImage.h"
#include "CameraHandle.h"
MediaFaceImage* MediaFaceImage::m_instance = nullptr; // 初始化指针为空
MediaFaceImage::MediaFaceImage()
{
}
// Tears down the vendor SDK when the singleton is destroyed.
MediaFaceImage::~MediaFaceImage()
{
XSDK_UnInit();
}
// Lazily creates and returns the process-wide instance.
// NOTE(review): this check-then-create is not thread-safe; confirm the first
// call happens before any concurrent access (e.g. from SDK callbacks).
MediaFaceImage* MediaFaceImage::getInstance()
{
if (m_instance == nullptr) // not created yet?
{
m_instance = new MediaFaceImage(); // create on first use; never freed (lives for the process)
}
return m_instance; // pointer to the shared instance
}
// Returns a snapshot COPY of the handle->CameraHandle map (callers use
// .count()/.at() on it). Note the by-value return copies the whole map per call.
std::map<int,CameraHandle*>MediaFaceImage::getCurrentDevice(){
return currentDevice;
}
/// Removes the device registered under hObject; no-op when absent.
void MediaFaceImage::clearCurrentDevice(int hObject){
    auto it = currentDevice.find(hObject);
    if (it != currentDevice.end()) {
        currentDevice.erase(it);
    }
}
/// Registers a handle->CameraHandle mapping. std::map::emplace inserts only
/// when the key is absent, matching the original count()-guarded insert.
void MediaFaceImage::setMap(int &key,CameraHandle*value){
    currentDevice.emplace(key, value);
}
// Global SDK event callback registered in SdkInit (pUserData is the
// MediaFaceImage instance). Alarm events are dispatched asynchronously to the
// matching CameraHandle via the global thread pool so this callback returns
// quickly to the SDK.
static int sdkInitCallback(XSDK_HANDLE hObject, int nMsgId, int nParam1,
int nParam2, int nParam3, const char* szString, void* pObject,
int64 lParam, int nSeq, void* pUserData, void* pMsg){
if (pUserData == nullptr) {
qInfo() << "pUserData 为空";
return -1;
}
switch (nMsgId)
{
case ESXSDK_ON_DEV_STATE:
{
// nParam1 == 6 means the device logged in; anything else: disconnected.
printf("ESXSDK_ON_DEV_STATE[%s]\r\n", nParam1 == 6 ? "ESTATE_DEV_Logined" : "ESTATE_DEV_NetDisConnect");
}
break;
case EXSDK_DATA_FORMATE_FRAME:
break;
case EXCMD_ALARM_REQ:
{
MediaFaceImage* mediaFaceImage = static_cast<MediaFaceImage*>(pUserData);
// Only dispatch alarms for devices that are still registered.
if(mediaFaceImage->getCurrentDevice().count(hObject)>0){
QString qString(szString);
CameraHandle* cameraHandle= mediaFaceImage->getCurrentDevice().at(hObject);
QThreadPool* threadPool = QThreadPool::globalInstance();
// Bind the alarm payload to the camera's handler and run it off-thread.
auto taskCallBack=std::bind(&CameraHandle::callbackFunction, cameraHandle, hObject, qString);
auto taskRunnable = new TaskRunnable(taskCallBack, hObject,cameraHandle->getChannel(), RunFunction::SdkCallbackFunction);
// task->setAutoDelete(false); // would keep the task alive after execution
threadPool->start(taskRunnable);
// if (!threadPool->tryStart(task)) { // non-blocking start; skipped when the pool is full
// qDebug() << "线程池已满,无法启动TaskRunnable";
// }
}
}
break;
default:
break;
}
return 0;
}
// Synchronously scans the LAN for XSDK devices and fills `devices`
// (keyed by serial number; entries are heap-allocated and owned by the caller).
// Returns the number of devices found, or -1 when none were found.
int MediaFaceImage::SdkSearchDevicesSyn(std::map<QString, vides_data::localDeviceStatus *> &devices){
    constexpr int nMaxCount = 100; // SDK scan capacity per call
    // std::vector replaces the raw new[]/delete[] pair: no leak on any
    // return path and elements are zero-initialized.
    std::vector<SXSDK_CONFIG_NET_COMMON> results(nMaxCount);
    int nActualCount = XSDK_SearchDevicesSyn(results.data(), nMaxCount);
    printf("nCount:%d\r\n", nActualCount);
    if (nActualCount <= 0)
    {
        qDebug() << QString("Search no Device");
        return -1;
    }
    // Converts a fixed-size, possibly NUL-padded SDK char array to QString.
    // The original passed sizeof(...) to fromUtf8, which embedded the
    // trailing '\0' padding inside the QString.
    auto fixedField = [](const char* p, size_t cap) {
        size_t len = 0;
        while (len < cap && p[len] != '\0') ++len;
        return QString::fromUtf8(p, static_cast<int>(len));
    };
    for (int i = 0; i < nActualCount; i++)
    {
        qDebug() << QString("[%1][IP:%2.%3.%4.%5][SN:%6][Mac:%7]")
                    .arg(i)
                    .arg(results[i].HostIP.c[0])
                    .arg(results[i].HostIP.c[1])
                    .arg(results[i].HostIP.c[2])
                    .arg(results[i].HostIP.c[3])
                    .arg(results[i].sSn)
                    .arg(results[i].sMac);
        vides_data::localDeviceStatus *pDevice = new vides_data::localDeviceStatus();
        pDevice->sSn = QString::fromUtf8(results[i].sSn);
        pDevice->HostIP = results[i].HostIP;
        pDevice->TCPPort = results[i].TCPPort;
        pDevice->HttpPort = results[i].HttpPort;
        pDevice->UserName = fixedField(results[i].DefaultUser, sizeof(results[i].DefaultUser));
        pDevice->password = fixedField(results[i].DefaultPwd, sizeof(results[i].DefaultPwd));
        devices.insert(std::make_pair(pDevice->sSn, pDevice));
    }
    return nActualCount;
}
// One-time XSDK library initialization: sets log level, config/temp paths
// and registers sdkInitCallback with `this` as user data.
// Returns the (negative-on-failure) result of XSDK_Init.
// NOTE(review): pParam is heap-allocated and never freed — a leak if
// XSDK_Init copies the struct, required if the SDK retains the pointer;
// confirm ownership against the XSDK docs before changing.
// NOTE(review): strcpy into szConfigPath/szTempPath is unbounded — a long
// path would overflow the fixed-size SDK buffers. TODO verify capacities.
int MediaFaceImage::SdkInit(QString &szConfigPath, QString &szTempPath) {
SXSDKInitParam *pParam=new SXSDKInitParam();
pParam->nLogLevel=8; // SDK log verbosity (semantics defined by XSDK)
QByteArray && byConfigPath=szConfigPath.toLocal8Bit();
strcpy(pParam->szConfigPath, byConfigPath.data());
QByteArray && byTempPath = szTempPath.toLocal8Bit();
strcpy(pParam->szTempPath, byTempPath.data());
SMsgReceiver sms(nullptr,sdkInitCallback,this); // `this` travels back as pUserData
pParam->mainMsgCallBack=sms;
int initResult= XSDK_Init(pParam);
if(initResult<0){
qInfo() << "sdk 初始化失败";
return initResult;
}
return initResult;
}
// Splits a raw SDK snapshot frame into its JPEG payload and trailing JSON.
// Layout (as this code assumes — TODO confirm against the SDK frame spec):
//   bytes 0..3   little-endian JPEG length
//   bytes 32..   JPEG data
//   after the JPEG EOI marker (0xFF 0xD9): a NUL-terminated JSON string.
// NOTE(review): no bounds checks — the caller must guarantee pJpg/pJson are
// large enough and that pData actually contains a full frame; a malformed
// frame (no EOI marker, or unterminated JSON) would leave pJson unset or
// overrun it via strcpy.
void MediaFaceImage::ParserImageData(const unsigned char* pData, int nDataLen, char* pJpg, int* nJpgLen, char* pJson)
{
// Assemble the 32-bit length from the first four bytes (little-endian).
quint32 nPicLen = static_cast<quint32>(pData[0]) | (static_cast<quint32>(pData[1]) << 8) | (static_cast<quint32>(pData[2]) << 16) | (static_cast<quint32>(pData[3]) << 24);
qDebug() << "nPicLen =" << nPicLen;
*nJpgLen = static_cast<int>(nPicLen);
memcpy(pJpg, (pData + 32), *nJpgLen); // JPEG starts after a 32-byte header
const unsigned char* pInfoHead = nullptr;
// Scan backwards for the last JPEG end-of-image marker (0xFF 0xD9).
for (int i = nDataLen - 2; i > -1; i--)
{
if (pData[i] == 0xff && pData[i + 1] == 0xd9)
{
pInfoHead = pData + i;
break;
}
}
if (pInfoHead != nullptr)
{
// The JSON blob is assumed to sit right after the marker, NUL-terminated.
strcpy(pJson, reinterpret_cast<const char*>(pInfoHead + 2));
}
}
// Appends nLength bytes of pData to pFileName (creating the file if needed).
// Returns 0 on success, -2 for bad arguments, -1 when the file cannot be
// opened, -3 on a short write.
int MediaFaceImage::AbFile(const char* pFileName, const void* pData, int nLength) {
    if (pData == NULL || nLength <= 0) {
        return -2; // nothing valid to write
    }
    FILE* fp = fopen(pFileName, "ab+");
    if (fp == NULL) {
        return -1; // open failed
    }
    const size_t expected = static_cast<size_t>(nLength);
    const size_t written = fwrite(pData, 1, expected, fp);
    if (written != expected) {
        fclose(fp);
        return -3; // short write
    }
    fflush(fp); // push data to the OS before closing
    fclose(fp);
    return 0;
}
// Overwrites pFileName with nLength bytes of pData.
// Returns 0 on success, -2 for bad arguments, -1 when the file cannot be
// opened, -3 on a short write.
int MediaFaceImage::ToFile(const char* pFileName, const void* pData, int nLength)
{
    if (pData == NULL || nLength <= 0)
    {
        return -2; // nothing valid to write
    }
    FILE* fp = fopen(pFileName, "wb");
    if (fp == NULL)
    {
        return -1; // open failed
    }
    const size_t expected = static_cast<size_t>(nLength);
    const size_t written = fwrite(pData, 1, expected, fp);
    if (written != expected)
    {
        fclose(fp);
        return -3; // short write
    }
    fflush(fp); // push data to the OS before closing
    fclose(fp);
    return 0;
}
// Grabs a synchronous snapshot from the device and decodes it into `image`.
// Returns the encoded snapshot size in bytes, or -1 on grab/decode failure.
int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat &image) {
    const int BufferSize = 1024 * 1024 * 2; // 2 MB snapshot buffer
    // unique_ptr keeps the buffer exception-safe; freed on every return path.
    std::unique_ptr<unsigned char[]> pOutBuffer(new unsigned char[BufferSize]);
    int pInOutBufferSize = 0;
    int ret = XSDK_DevSnapSyn(hMedia, nChannel, "", pOutBuffer.get(), &pInOutBufferSize);
    if (ret < 0 || pInOutBufferSize <= 0) {
        qInfo() << "同步设备端抓图失败";
        return -1;
    }
    // Wrap the SDK buffer in a non-owning Mat header and decode directly,
    // avoiding the original's extra up-to-2MB memcpy into a std::vector
    // (imdecode copies what it needs while decoding).
    cv::Mat encoded(1, pInOutBufferSize, CV_8UC1, pOutBuffer.get());
    image = cv::imdecode(encoded, cv::IMREAD_UNCHANGED);
    if (image.empty()) {
        // Decode failure previously went unreported (the original returned
        // the byte count with an empty Mat); surface it as an error instead.
        return -1;
    }
    return pInOutBufferSize;
}
//int MediaFaceImage::FaceImageCallBack(XSDK_HANDLE hMedia, int nChannel, cv::Mat &image)
//{
// // static const int BufferSize = 1024 * 1024 * 2;
// // static unsigned char pOutBuffer[BufferSize];
// const int BufferSize = 1024 * 1024 * 2;
// unsigned char* pOutBuffer = new unsigned char[BufferSize];
// int pInOutBufferSize = 0;
// int ret = XSDK_DevSnapSyn(hMedia, nChannel, "", pOutBuffer, &pInOutBufferSize);
// if (ret < 0 || pInOutBufferSize<=0 ) {
// qInfo() << "同步设备端抓图失败";
// if (pOutBuffer)
// {
// delete[]pOutBuffer;
// pOutBuffer = nullptr;;
// }
// return -1;
// }
// std::vector<uchar> buffer(pInOutBufferSize);
// memcpy(buffer.data(), pOutBuffer, pInOutBufferSize);
// image =std::move(cv::imdecode(buffer, cv::IMREAD_UNCHANGED));;
// if (pOutBuffer)
// {
// delete[]pOutBuffer;
// pOutBuffer = nullptr;;
// }
// return pInOutBufferSize;
//}
// Grabs a synchronous snapshot and returns the raw encoded bytes in `buffer`.
// Returns the snapshot size, or -1 on failure (buffer is cleared then).
//
// The original used a function-local *static* 2 MB buffer, which is shared
// by every thread that takes a snapshot — a data race given that this class
// is reached from QThreadPool workers. The SDK now writes straight into the
// caller's vector, which also removes one full copy.
int MediaFaceImage::CameraImage(XSDK_HANDLE hMedia, int nChannel, std::vector<uchar> &buffer){
    const int BufferSize = 1024 * 1024 * 2; // 2 MB capacity handed to the SDK
    buffer.resize(BufferSize);
    int pInOutBufferSize = 0; // filled by the SDK with the actual size
    int ret = XSDK_DevSnapSyn(hMedia, nChannel, "", buffer.data(), &pInOutBufferSize);
    if (ret < 0 || pInOutBufferSize <= 0) {
        qInfo() << "同步设备端抓图失败";
        buffer.clear(); // don't leave 2 MB of garbage in the out-param
        return -1;
    }
    buffer.resize(pInOutBufferSize); // shrink to the bytes actually written
    return pInOutBufferSize;
}
#ifndef MEDIAFACEIMAGE_H
#define MEDIAFACEIMAGE_H
#include "XSDKPublic.h"
#include "XNetSDKSyn.h"
#include "XNetSDKDefine.h"
#include "VidesData.h"
#include "Common.h"
#include "TaskRunnable.h"
#include <memory>
#include <map>
#include <QDebug>
#include <QThreadPool>
#include <opencv2/opencv.hpp>
class CameraHandle;
// Singleton facade over the XSDK device API: SDK initialization, LAN device
// discovery, synchronous snapshot grabbing, raw-frame parsing and the
// hObject -> CameraHandle registry used by the SDK message callback.
class MediaFaceImage
{
public:
    static MediaFaceImage* getInstance(); // lazily-created process-wide instance
    // Splits a raw SDK frame into its JPEG payload (pJpg/nJpgLen) and the
    // trailing JSON string (pJson). Caller supplies sufficiently large buffers.
    void ParserImageData(const unsigned char* pData, int nDataLen, char* pJpg, int* nJpgLen, char* pJson);
    // Snapshot decoded into a cv::Mat; returns encoded size or -1.
    int FaceImageCallBack(XSDK_HANDLE hMedia,int nChannel,cv::Mat &image);
    // Snapshot as raw encoded bytes; returns size or -1.
    int CameraImage(XSDK_HANDLE hMedia,int nChannel,std::vector<uchar> &buffer);
    // File helpers: 0 on success, -1 open failure, -2 bad args, -3 short write.
    // (parameter name typo "nLenght" fixed; definitions already used nLength)
    int ToFile(const char* pFileName, const void* pData, int nLength);
    int AbFile(const char* pFileName, const void* pData, int nLength);
    // LAN scan; fills `devices` keyed by serial number, returns count or -1.
    int SdkSearchDevicesSyn(std::map< QString,vides_data::localDeviceStatus*>& devices);
    int SdkInit(QString &szConfigPath, QString &szTempPath);
    // hObject -> CameraHandle registry (returned by value, i.e. a snapshot).
    std::map<int,CameraHandle*>getCurrentDevice();
    void clearCurrentDevice(int hObject);
    void setMap(int &key,CameraHandle*value);
private:
    MediaFaceImage();  // private: construct only through getInstance()
    ~MediaFaceImage(); // never runs for the leaked singleton instance
    std::map<int,CameraHandle*>currentDevice;
    static MediaFaceImage* m_instance; // legacy instance pointer
};
#endif // MEDIAFACEIMAGE_H
#ifndef MYWRAPPER_H
#define MYWRAPPER_H
// Pimpl wrapper that forwards SDK-style callback arguments to a hidden
// implementation (keeps SDK headers out of this header).
// NOTE(review): m_pImpl is an owning raw pointer — ensure ~MyWrapper deletes
// it and that copy operations are disabled or implemented in the .cpp.
class MyWrapper {
public:
MyWrapper();
~MyWrapper();
// Mirrors the XSDK message-callback signature; semantics of the parameters
// are defined by the SDK, not by this class.
void doSomething(int hObject,int nMsgId, int nParam1, int nParam2, int nParam3, const char* szString, void* pObject, long long lParam, int nSeq, void* pUserData, void* pMsg);
private:
class Impl;    // defined in the .cpp
Impl* m_pImpl; // owning pointer to the implementation
};
#endif // MYWRAPPER_H
#include "NewHttpService.h"
// No construction/teardown work: m_httpClient and httpUrl manage themselves.
NewHttpService::NewHttpService() {
}
NewHttpService::~NewHttpService() {
}
// POSTs a device heartbeat to /api/v1.0/device/ping.
// Returns a heap-allocated response (caller owns it); code==2 with
// OPERATION_FAILED on transport failure.
vides_data::response* NewHttpService::httpPostDeviceStatus(vides_data::requestDeviceStatus & deviceStatus) {
    // Build the full URL in a LOCAL string. The original appended the path to
    // the httpUrl *member*, so every call grew the stored base URL
    // ("…/ping/api/v1.0/device/ping/…") — requests after the first went to a
    // garbage endpoint.
    QString url = httpUrl;
    url.append("/api/v1.0/device/ping");
    QJsonObject json;
    json.insert("sn", deviceStatus.sSn);
    json.insert("type", deviceStatus.type);
    json.insert("status", deviceStatus.status);
    QJsonDocument jsonDoc;
    jsonDoc.setObject(json);
    QByteArray bytearr = jsonDoc.toJson(QJsonDocument::Compact);
    vides_data::response *resp = new vides_data::response();
    QNetworkRequest request;
    request.setUrl(QUrl(url));
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    if (m_httpClient.post(request, bytearr)) {
        QByteArray && byte = m_httpClient.text().toUtf8();
        QJsonDocument docujson = QJsonDocument::fromJson(byte.data());
        QVariantMap map = docujson.object().toVariantMap();
        resp->code = map["code"].toInt();
        resp->msg = map["msg"].toString();
    } else {
        qDebug() << m_httpClient.errorCode();
        resp->code = 2; // transport-level failure sentinel
        resp->msg = OPERATION_FAILED;
    }
    return resp;
}
// GETs /api/v1.0/device/all?sn=<serialNumber> and fills `datas` with
// heap-allocated responseDeviceStatus entries (caller owns them).
// Returns a heap-allocated response; code==2 with OPERATION_FAILED on
// transport failure.
vides_data::response *NewHttpService::httpFindCameras(QString &serialNumber, std::list<vides_data::responseDeviceStatus*>&datas) {
    // Same fix as httpPostDeviceStatus: build the URL in a local instead of
    // appending to the httpUrl member, which corrupted the base URL on every
    // call after the first.
    QString url(httpUrl);
    url.append("/api/v1.0/device/all");
    vides_data::response *resp = new vides_data::response();
    QUrlQuery query;
    query.addQueryItem("sn", serialNumber);
    QNetworkRequest request;
    QUrl requestUrl(url);
    requestUrl.setQuery(query);
    request.setUrl(requestUrl);
    request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
    if (m_httpClient.get(request)) {
        QByteArray && byte = m_httpClient.text().toUtf8();
        QJsonDocument docujson = QJsonDocument::fromJson(byte.data());
        QVariantMap map = docujson.object().toVariantMap();
        resp->code = map["code"].toInt();
        QJsonArray dataArray = map["data"].toJsonArray();
        for (const QJsonValue& value : dataArray) {
            vides_data::responseDeviceStatus *res = new vides_data::responseDeviceStatus();
            QJsonObject dataObject = value.toObject();
            res->sSn = dataObject["sn"].toString();
            res->type = dataObject["type"].toInt();
            res->merchant_id = dataObject["merchant_id"].toInt();
            datas.push_back(res);
        }
        resp->msg = map["msg"].toString();
    } else {
        qDebug() << m_httpClient.errorCode();
        resp->code = 2; // transport-level failure sentinel
        resp->msg = OPERATION_FAILED;
    }
    return resp;
}
#ifndef NEWHTTPSERVICE_H
#define NEWHTTPSERVICE_H
#include <QObject>
#include "HttpClient.h"
#include "VidesData.h"
#include "Common.h"
#include <list>
#include <QJsonObject>
#include <QJsonArray>
#include <QJsonDocument>
#include <QUrlQuery>
const QString OPERATION_FAILED = "操作失败";
const QString OPERATION_SUCCESS = "操作成功";
// Meyers-singleton HTTP service for the device REST API.
// Set the base URL once via setHttpUrl() before issuing requests.
class NewHttpService : public QObject {
Q_OBJECT
public:
// Thread-safe lazy singleton (C++11 magic static).
static NewHttpService& getInstance() {
static NewHttpService instance;
return instance;
}
// Heartbeat POST; returns caller-owned response.
vides_data::response* httpPostDeviceStatus(vides_data::requestDeviceStatus & deviceStatus);
// Device listing GET; fills caller-owned list entries.
vides_data::response *httpFindCameras(QString &serialNumber,std::list<vides_data::responseDeviceStatus*>&datas);
void setHttpUrl(const QString& url);
// NOTE(review): ctor/dtor and members are public, so extra instances can be
// created and internals mutated, defeating the singleton — consider making
// this section private (left unchanged here to avoid breaking callers).
public:
NewHttpService();
~NewHttpService();
HttpClient m_httpClient; // shared transport
QString httpUrl;         // base URL; request paths are appended per call
};
#endif // NEWHTTPSERVICE_H
#include "ParkingSpaceInfo.h"
// Constructs a space seeded with the currently recognized plate (copied in).
ParkingSpaceInfo::ParkingSpaceInfo(RecognizedInfo &currentPlate)
:currentPlate(currentPlate)
{
}
// Default space: empty plate and queue.
ParkingSpaceInfo::ParkingSpaceInfo(){
}
ParkingSpaceInfo::~ParkingSpaceInfo(){
}
// Appends a recognition result to this space's history (thread-safe).
void ParkingSpaceInfo::addQueue(RecognizedInfo &info){
QMutexLocker locker(&queueMutex);
queuels.enqueue(info);
}
// Drops the oldest entry, if any (thread-safe).
void ParkingSpaceInfo::removeQueue(){
QMutexLocker locker(&queueMutex);
if (!queuels.isEmpty()) {
queuels.dequeue();
}
}
// Purges entries whose licence plate is empty, but only once the queue has
// grown past 3 elements (keeps a minimum of recent history). Thread-safe.
void ParkingSpaceInfo::removeNoQueue() {
    QMutexLocker locker(&queueMutex);
    // size() > 3 already implies non-empty; the original's extra
    // !isEmpty() test was redundant.
    if (queuels.size() > 3) {
        // Iterate backwards so removals don't shift the indexes still to visit.
        for (int i = queuels.size() - 1; i >= 0; --i) {
            if (queuels[i].getLicensePlate().isEmpty()) {
                queuels.removeAt(i);
            }
        }
    }
}
// Direct access to the history queue.
// NOTE(review): returns a mutable reference WITHOUT holding queueMutex, so a
// caller iterating it can race with addQueue/removeQueue — callers must
// provide their own synchronization.
QQueue<RecognizedInfo> &ParkingSpaceInfo::getQueue(){
return queuels;
}
// Setters take queueMutex; getters return references without locking.
// NOTE(review): this asymmetry means a getter's caller can observe a value
// mid-update — acceptable only if reads and writes happen on one thread.
void ParkingSpaceInfo::setArea(vides_data::ParkingArea &a){
QMutexLocker locker(&queueMutex);
this->area=a;
}
vides_data::ParkingArea& ParkingSpaceInfo::getArea(){
return area;
}
RecognizedInfo& ParkingSpaceInfo::getCurrentPlate(){
return currentPlate;
}
void ParkingSpaceInfo::setCurrentPlate(RecognizedInfo &current){
QMutexLocker locker(&queueMutex);
this->currentPlate=current;
}
// Index of this space within the camera's configured parking areas.
int ParkingSpaceInfo::getSpaceIndex(){
return spaceIndex;
}
void ParkingSpaceInfo::setSpaceIndex(int spaceIndex){
QMutexLocker locker(&queueMutex);
this->spaceIndex=spaceIndex;
}
#ifndef PARKINGSPACEINFO_H
#define PARKINGSPACEINFO_H
#include "VidesData.h"
#include "RecognitionInfo.h"
#include <QMutex>
#include <QQueue>
// Per-parking-space state: the plate currently occupying it, a bounded-ish
// history of recognition results, and the space's polygon on the image.
// queueMutex guards mutations; see the .cpp for locking caveats on getters.
class ParkingSpaceInfo {
public:
ParkingSpaceInfo(RecognizedInfo & currentPlate);
ParkingSpaceInfo();
~ParkingSpaceInfo();
RecognizedInfo& getCurrentPlate();
void setCurrentPlate(RecognizedInfo & current);
void addQueue(RecognizedInfo &info);       // append newest result
void removeQueue();                        // drop oldest result
void removeNoQueue();                      // purge empty-plate entries (size > 3)
QQueue<RecognizedInfo> &getQueue();        // unsynchronized reference — see .cpp
void setArea(vides_data::ParkingArea &a);
vides_data::ParkingArea &getArea();
int getSpaceIndex();
void setSpaceIndex(int spaceIndex);
private:
QQueue<RecognizedInfo> queuels;      // recognition history, oldest first
RecognizedInfo currentPlate;         // plate currently assigned to the space
vides_data::ParkingArea area;        // space polygon in image coordinates
int spaceIndex;                      // NOTE(review): uninitialized until setSpaceIndex
QMutex queueMutex;                   // guards the members above
};
#endif // PARKINGSPACEINFO_H
#ifndef QTQtHttpClient_H
#define QTQtHttpClient_H
#include <functional>
#include <QMap>
#include <QVariant>
#include <QStringList>
#include <QNetworkReply>
#include <QNetworkRequest>
#include <QNetworkAccessManager>
class QtHttpClientPrivate;
/**
* 对 QNetworkAccessManager 简单封装的 HTTP 访问客户端,简化 GET、POST、PUT、DELETE、上传、下载等操作。
* 在执行请求前设置需要的参数和回调函数:
* 1. 调用 header() 设置请求头
* 2. 调用 param() 设置参数,使用 Form 表单的方式提交请求,GET 请求的 query parameters 也可以用它设置
* 3. 调用 json() 设置 JSON 字符串的 request body,Content-Type 为 application/json,
* 当然也可以不是 JSON 格式,因使用 request body 的情况多数是使用 JSON 格式传递复杂对象,故命名为 json
* 4. 调用 success() 注册请求成功的回调函数
* 5. 调用 fail() 注册请求失败的回调函数
* 6. 调用 complete() 注册请求结束的回调函数
* success(), fail(), complete() 的回调函数是可选的,根据需要注册对应的回调函数,也可以一个都不注册
* 然后根据请求的类型调用 get(), post(), put(), remove(), download(), upload() 执行 HTTP 请求
*
* 默认 QtHttpClient 会创建一个 QNetworkAccessManager,如果不想使用默认的,调用 manager() 传入即可。
* 调用 debug(true) 设置为调试模式,输出调试信息如 URL、参数等。
*/
/**
 * Fluent HTTP client wrapping QNetworkAccessManager; simplifies GET, POST,
 * PUT, DELETE, download and upload.
 *
 * Usage: configure with header()/param()/json(), register the optional
 * success()/fail()/complete() callbacks, then invoke one of get(), post(),
 * put(), remove(), download() or upload().
 *
 * By default a QNetworkAccessManager is created per request and deleted when
 * the request finishes; share one across requests via manager() to avoid the
 * per-manager thread cost (a shared manager is never deleted by this class).
 * debug(true) logs the URL and parameters of each request.
 */
class QtHttpClient {
public:
    QtHttpClient(const QString &url);
    ~QtHttpClient();
    void stop2();
    /// Use a caller-owned, shared QNetworkAccessManager. Returns *this.
    QtHttpClient& manager(QNetworkAccessManager *manager);
    /// Enable/disable debug output (URL, parameters). Returns *this.
    QtHttpClient& debug(bool debug);
    /// Add one request parameter (form field / GET query parameter). Returns *this.
    QtHttpClient& param(const QString &name, const QVariant &value);
    /// Add several request parameters at once. Returns *this.
    QtHttpClient& params(const QMap<QString, QVariant> &ps);
    /// Set a raw request body, conventionally JSON (Content-Type
    /// application/json). Returns *this.
    QtHttpClient& json(const QString &json);
    /// Add one request header. Returns *this.
    QtHttpClient& header(const QString &name, const QString &value);
    /// Add several request headers at once. Returns *this.
    QtHttpClient& headers(const QMap<QString, QString> nameValues);
    /// Callback invoked with the response body on success. Returns *this.
    QtHttpClient& success(std::function<void(const QString &)> successHandler);
    /// Callback invoked with (error message, HTTP status) on failure. Returns *this.
    QtHttpClient& fail(std::function<void(const QString &, int)> failHandler);
    /// Callback invoked when the request ends, success or failure. Returns *this.
    QtHttpClient& complete(std::function<void()> completeHandler);
    /// Response charset; UTF-8 by default. Returns *this.
    QtHttpClient& charset(const QString &cs);
    /// Execute a GET request.
    void get();
    /// Execute a POST request.
    void post();
    /// Execute a PUT request.
    void put();
    /// Execute a DELETE request ("delete" is a C++ keyword, hence "remove").
    /// Note: QNetworkAccessManager::deleteResource() carries no parameters.
    void remove();
    /// GET the resource and save it to savePath.
    void download(const QString &savePath);
    /// POST one file; server-side field name is "file".
    void upload(const QString &path);
    /// POST in-memory data as a file; server-side field name is "file".
    void upload(const QByteArray &data);
    /// POST several files; server-side field name is "files".
    void upload(const QStringList &paths);
private:
    QtHttpClientPrivate *d;
}; // <- semicolon was missing in the original: a class definition must end with ';'
#endif // QTQtHttpClient_H
#ifndef RECOGNITIONINFO_H
#define RECOGNITIONINFO_H
#include <QString>
#include <map>
// Value type for one licence-plate recognition result:
// the plate text, when it was recognized, and the plate color.
class RecognizedInfo {
public:
RecognizedInfo(const QString& plate, qint64 time, QString color);
RecognizedInfo();
~RecognizedInfo();
QString getLicensePlate();
qint64 getRecognizeTime() ;
QString getColor();
void setLicensePlate(const QString& plate);
void setRecognizeTime(qint64 time);
void setColor(QString &color);
private:
QString licensePlate; // plate number text
qint64 recognizeTime; // recognition timestamp (NOTE: uninitialized by default ctor)
QString color;        // plate color
};
#endif // RECOGNITIONINFO_H
#include "RecognitionInfo.h"
// Default-constructed result: empty strings, recognizeTime left
// uninitialized (NOTE(review): read-before-set would yield garbage).
RecognizedInfo::RecognizedInfo(){
}
RecognizedInfo::RecognizedInfo(const QString& plate, qint64 time,
QString color)
: licensePlate(plate),
recognizeTime(time),
color(color)
{
}
// Getters (by value).
QString RecognizedInfo::getLicensePlate() {
return licensePlate;
}
qint64 RecognizedInfo::getRecognizeTime() {
return recognizeTime;
}
QString RecognizedInfo::getColor(){
return color;
}
// Setters.
void RecognizedInfo::setLicensePlate(const QString& plate) {
this->licensePlate = plate;
}
void RecognizedInfo::setRecognizeTime(qint64 time) {
this->recognizeTime = time;
}
void RecognizedInfo::setColor(QString &color){
this->color=color;
}
RecognizedInfo::~RecognizedInfo(){
}
File added
_00_fdet_160
_01_lmk
_02_pose_fp16
_03_extract
_04_refine_net
_05_mask
_06_msafa27
_07_pose_q_fp16
#ifndef TASK_H
#define TASK_H
// Empty placeholder class.
// NOTE(review): nothing in the visible sources uses it — candidate for
// removal once confirmed unreferenced project-wide.
class Task
{
public:
Task() {}
~Task(){}
};
#endif // TASK_H
#include "TaskRunnable.h"
// Builds a pool task that carries one bound callable; the callable is stored
// in the slot matching its declared kind and run() dispatches on that kind.
TaskRunnable::TaskRunnable(std::function<void()> newTask, int hDevice, int channel, RunFunction func)
    : m_hDevice(hDevice), m_channel(channel), runFunction(func) {
    switch (runFunction) {
    case SdkDevSnapSyn:
        devSnapSyn = newTask;
        break;
    case SdkCallbackFunction:
        callbackFunction = newTask;
        break;
    }
    // Let QThreadPool delete this runnable once run() has finished.
    setAutoDelete(true);
}
// TaskRunnables are destroyed on QThreadPool worker threads (autoDelete).
// The original kept a function-local `static int` destruction counter and
// printf'd it here; incrementing that static from multiple worker threads
// without synchronization is a data race, so the debug counter is removed
// rather than adding locking to a destructor.
TaskRunnable::~TaskRunnable(){
}
// Plain setters for re-configuring a task before it is queued.
// NOTE(review): none of them lock `mutex`; calling them after start() races
// with run() — only use them before handing the task to the pool.
void TaskRunnable::setString(const QString& str){
this->szString=str;
}
void TaskRunnable::setHdevice(const int& hDevice){
this->m_hDevice=hDevice;
}
void TaskRunnable::setChannel(const int& channel){
this->m_channel=channel;
}
void TaskRunnable::setRunFunction(RunFunction func) {
this->runFunction = func;
}
void TaskRunnable::setDevSnapSyn(const DevSnapSyn& function) {
this->devSnapSyn = function;
}
void TaskRunnable::setCallbackFunction(const CallbackFunction& function) {
this->callbackFunction = function;
}
// Pool entry point: dispatches to the stored callable for this task's kind.
// Exceptions are swallowed (with a log line) so a failing task cannot
// terminate the worker thread; an unset std::function would throw
// std::bad_function_call and be caught here as well.
void TaskRunnable::run() {
try {
if (runFunction == SdkDevSnapSyn) {
devSnapSyn(); // snapshot task
} else if(runFunction == SdkCallbackFunction) {
callbackFunction(); // SDK alarm-callback task
}
} catch (const std::exception& e) {
qDebug() << "在任务运行过程中发生异常:" << e.what();
} catch (...) {
qDebug() << "在任务运行过程中发生未知异常";
}
}
#ifndef TASKRUNNABLE_H
#define TASKRUNNABLE_H
#include <QMutex>
#include <QMutexLocker>
#include <QRunnable>
#include <QString>
#include <functional>
#include <QDebug>
enum RunFunction {
SdkDevSnapSyn,
SdkCallbackFunction
};
// QRunnable that carries one of two kinds of bound callables (device
// snapshot or SDK alarm callback) onto the global thread pool.
// autoDelete is enabled: the pool destroys the task after run().
class TaskRunnable : public QRunnable {
public:
typedef std::function<void()> DevSnapSyn;
typedef std::function<void()> CallbackFunction;
~TaskRunnable();
TaskRunnable(std::function<void()> newTask, int hDevice, int channel, RunFunction function);
// Pre-start configuration only; not synchronized against run().
void setString(const QString& str);
void setHdevice(const int& hDevice);
void setChannel(const int& channel);
void setRunFunction(RunFunction func);
void setDevSnapSyn(const DevSnapSyn& function);
void setCallbackFunction(const CallbackFunction& function);
void run() override;
private:
DevSnapSyn devSnapSyn;             // used when runFunction == SdkDevSnapSyn
CallbackFunction callbackFunction; // used when runFunction == SdkCallbackFunction
int m_hDevice;                     // SDK device handle
int m_channel;                     // camera channel
QString szString;                  // optional payload (see setString)
RunFunction runFunction;           // selects which callable run() invokes
QMutex mutex;                      // NOTE(review): declared but never locked
};
#endif // TASKRUNNABLE_H
#include "ThreadSafeQueue.h"
// NOTE(review): template member definitions in a .cpp are only visible to
// translation units that #include this .cpp; other users will fail to link.
// Consider moving these definitions into ThreadSafeQueue.h — TODO confirm
// how this file is consumed by the build.

// Bounded FIFO: capacity fixed at construction.
template <typename T>
ThreadSafeQueue<T>::ThreadSafeQueue(int maxCount)
    : maxCount_(maxCount)
{
}

// Appends value; returns false when the queue is already at capacity.
template <typename T>
bool ThreadSafeQueue<T>::push(const T& value) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (queue_.size() < static_cast<size_t>(maxCount_)) {
        queue_.push_back(value);
        return true;
    }
    return false;
}

// Pops the OLDEST element into value (FIFO); returns false when empty.
// BUG FIX: the original read front() but then called pop_back(), removing
// the NEWEST element and leaving the front entry duplicated on the next pop.
template <typename T>
bool ThreadSafeQueue<T>::pop(T& value) {
    std::lock_guard<std::mutex> lock(mutex_);
    if (queue_.empty()) {
        return false;
    }
    value = queue_.front();
    queue_.erase(queue_.begin());
    return true;
}

template <typename T>
bool ThreadSafeQueue<T>::empty() const {
    std::lock_guard<std::mutex> lock(mutex_);
    return queue_.empty();
}

template <typename T>
bool ThreadSafeQueue<T>::full() const {
    std::lock_guard<std::mutex> lock(mutex_);
    return queue_.size() >= static_cast<size_t>(maxCount_);
}

template <typename T>
int ThreadSafeQueue<T>::size() const {
    std::lock_guard<std::mutex> lock(mutex_);
    return static_cast<int>(queue_.size());
}
#ifndef THREADSAFEQUEUE_H
#define THREADSAFEQUEUE_H
#include <vector>
#include <mutex>
template <typename T>
class ThreadSafeQueue {
public:
ThreadSafeQueue(int maxCount);
bool push(const T& value);
bool pop(T& value);
bool empty() const;
bool full() const;
int size() const;
private:
std::mutex mutex_;
std::vector<T> queue_;
int maxCount_;
};
#endif // THREADSAFEQUEUE_H
#ifndef VIDESDATA_H
#define VIDESDATA_H
#include "XNetSDKDefine.h"
#include<QString>
#include <QProcess>
#include <QDate>
#include <QProcess>
#include <QRegularExpression>
#include <QFile>
#include <QTextStream>
#include <QByteArray>
#include <QNetworkInterface>
#include <list>
namespace vides_data{
constexpr const char *HEADER_TYPE_KAY="Content-Type";
constexpr const char *HEADER_TYPE_VALUE="application/json";
constexpr const char *PROFLIE_TEST= "test";
// Generic API envelope: code/msg plus an untyped payload pointer.
struct response
{
int code;
void* data; // NOTE(review): uninitialized unless a caller sets it
QString msg;
response() {}
};
// Heartbeat payload for POST /device/ping.
struct requestDeviceStatus
{
QString sSn;
int8_t type;
int8_t status;
QString ip_addr;
requestDeviceStatus() {}
};
// Temporary STS credentials for object-storage uploads.
struct responseStsCredentials{
QString access_key_id;
QString access_key_secret;
QString bucket;
QString endpoint;
QString expiration;
QString security_token;
};
// GB28181 SIP registration parameters.
struct responseGb28181 {
QString sip_ip;
int sip_port;
QString serial;
QString realm;
QString username;
QString password;
int register_validity;
int heartbeat_interval;
QString device_id;
QString channel_id;
};
// Quadrilateral area as four corner points (server-side field naming).
struct responseArea {
float bottom_right_corner_x;
float bottom_right_corner_y;
float bottom_left_corner_x;
float bottom_left_corner_y;
float top_left_corner_x;
float top_left_corner_y;
float top_right_corner_x;
float top_right_corner_y;
};
// One device as returned by GET /device/all.
struct responseDeviceStatus
{
QString sSn;
int8_t type;
int8_t merchant_id; // NOTE(review): int8_t caps merchant ids at 127 — confirm
std::list<responseArea>areas;
responseDeviceStatus() {}
};
struct responseDeviceData{
std::list<responseDeviceStatus> list;
responseStsCredentials sts_credentials;
};
// Device found by the local LAN scan (see SdkSearchDevicesSyn).
struct localDeviceStatus
{
QString sSn;
int8_t type;
int8_t merchant_id;
SXSDK_IPAddress HostIP;
int HttpPort;
int TCPPort;
int ChannelNum;
QString UserName;
QString password;
localDeviceStatus() {}
};
// Face-recognition upload payload.
struct requestFaceReconition
{
QString id;
QByteArray img; // encoded image bytes
QString sn;
qint64 time;
responseArea area;
requestFaceReconition() {}
};
// Bounding box of one recognized face.
struct faceRecognitionResult
{
QString id;
uint32_t x;
uint32_t y;
uint32_t width;
uint32_t height;
faceRecognitionResult() {}
};
struct responseFaceReconition
{
QString id;
QString img; // image URL or base64 — confirm against server API
responseFaceReconition() {}
};
// Quadrilateral area, camelCase naming (local/parking-space side).
struct ParkingArea
{
float topLeftCornerX;
float topLeftCornerY;
float bottomLeftCornerX;
float bottomLeftCornerY;
float bottomRightCornerX;
float bottomRightCornerY;
float topRightCornerX;
float topRightCornerY;
ParkingArea() {}
};
// One plate detection attached to a parking area.
struct LicensePlate
{
ParkingArea areaLocation; // where the space is
QString new_plate;
QString new_color;
QByteArray img;           // encoded snapshot
qint64 time;
ParkingArea recognition;  // where the plate was found
LicensePlate() {}
};
struct requestLicensePlate
{
QString sn;
std::list<LicensePlate> plates;
requestLicensePlate() {}
};
// Connection parameters for one camera.
struct cameraParameters
{
QString sDevId;
int nDevPort;
QString sUserName;
QString sPassword;
int channel;
QString httpUrl;
QString sSn;
QString rtspUrl;
QString rtmpUrl;
cameraParameters() {}
};
// Device network configuration (mirrors the SDK's NetCommon block).
struct NetWorkNetCommon {
char* GateWay; // gateway IP
char* HostIP; // host IP
char* HostName; // host name
int HttpPort; // HTTP service port
char* MAC; // MAC address
int MaxBps; // bitrate cap
char* MonMode; // monitor protocol {"TCP","UDP","MCAST",…}
int SSLPort; // SSL listen port
char* Submask; // subnet mask
int TCPMaxConn; // max connections
int TCPPort; // TCP listen port
char* TransferPlan; // transfer strategy: "AutoAdapt", "Quality", "Fluency" or "Transmission"
int UDPPort; // UDP listen port
bool UseHSDownLoad; // enable high-speed recording download
};
// One recognition event (entry/exit) as acknowledged by the server.
struct responseRecognitionData
{
int id;
qint64 inTime;
qint64 outTime;
int recognitionType;
QString sn;
};
inline bool isVirtualMachine()
{
QString dmiPath;
#ifdef Q_OS_WIN
dmiPath = "HKEY_LOCAL_MACHINE\\HARDWARE\\DESCRIPTION\\System";
#else
dmiPath = "/sys/devices/virtual/dmi/id/";
#endif
QFile file(dmiPath);
return file.exists();
}
// Returns the default gateway address by shelling out to the platform's
// routing tool and scraping its output; empty string when not found.
// NOTE(review): the Windows branch matches the ENGLISH "Default Gateway"
// label, so it fails on localized systems, and QRegExp is deprecated in
// Qt 5 (QRegularExpression is already used below) — TODO unify.
// NOTE(review): the Mac branch runs `netstat -nr` but the parsing regex
// below expects `ip route` style "default via X" lines — verify on macOS.
inline QString getDefaultGateway() {
QProcess process;
QString gateway;
// Pick the platform-appropriate command.
#ifdef Q_OS_WIN
QString command = "ipconfig";
QStringList arguments;
arguments << "/all";
#elif defined(Q_OS_LINUX)
QString command = "ip";
QStringList arguments;
arguments << "route" << "show" << "default";
#elif defined(Q_OS_MAC)
QString command = "netstat";
QStringList arguments;
arguments << "-nr";
#endif
process.start(command, arguments);
// Block until the tool exits.
process.waitForFinished();
// Parse its stdout.
QString output(process.readAllStandardOutput());
#ifdef Q_OS_WIN
// Locate the gateway line via regex (English output only).
QRegExp rx("Default Gateway[ .]*: (.+?)(\r\n|\n)");
if (rx.indexIn(output) != -1) {
gateway = rx.cap(1).trimmed();
}
#elif defined(Q_OS_LINUX) || defined(Q_OS_MAC)
// Scan line-by-line for "default via <addr>".
QStringList lines = output.split('\n');
QRegularExpression rx("^default via (\\S+)");
foreach (const QString &line, lines) {
QRegularExpressionMatch match = rx.match(line);
if (match.hasMatch()) {
gateway = match.captured(1);
break;
}
}
#endif
return gateway;
}
inline bool pingAddress(const QString &address) {
QProcess process;
QString program = "ping";
QStringList arguments;
arguments << "-c" << "1" << address; // -c 1 表示发送一个 Ping 包
process.start(program, arguments);
process.waitForFinished();
QString output(process.readAllStandardOutput());
return output.contains("1 packets transmitted, 1 received");
}
// Reads the CPU serial number from /proc/cpuinfo (Linux/embedded boards);
// returns an empty string when unavailable.
inline QString getSerialNumber() {
    QProcess process;
    // Single pipeline: only the "Serial" line (if any) reaches stdout.
    process.start("bash", QStringList() << "-c" << "cat /proc/cpuinfo | grep Serial");
    process.waitForFinished(-1); // block until the pipeline completes
    QString output = process.readAllStandardOutput();
    // QString::section() safely yields an empty string when there is no ':'
    // separator; the original split(":").at(1) asserted/crashed on any
    // non-empty output that lacked a colon.
    return output.section(':', 1, 1).trimmed();
}
}
#endif // VIDESDATA_H
blur.jpg

5.9 KB

This source diff could not be displayed because it is too large. You can view the blob instead.
crop.png

20.1 KB

cxk.jpg

129 KB

This diff is collapsed. Click to expand it.
# qmake project for the UI build (GAMERAVIDEO).
QT += core gui network multimedia sql concurrent
greaterThan(QT_MAJOR_VERSION, 4): QT += widgets
CONFIG += c++11
TARGET = GAMERAVIDEO
TEMPLATE = app
# The following define makes your compiler emit warnings if you use
# any Qt feature that has been marked deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
#QMAKE_LIBDIR += /usr/local/lib
#INCLUDEPATH+=/usr/local/include/opencv4
#INCLUDEPATH+=/usr/local/include/hyperface
#INCLUDEPATH+=/usr/local/include/hyper
#INCLUDEPATH+=/usr/local/include/XNetSDK
#INCLUDEPATH+=/usr/local/include/human
# Library search paths per host architecture (x86 dev box vs ARM target).
unix:contains(QMAKE_HOST.arch, x86_64) {
QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
}
unix:contains(QMAKE_HOST.arch, arm) {
QMAKE_LIBDIR += /usr/local/lib
}
# Header search paths per host architecture.
# NOTE(review): QMAKE_HOST.arch is the BUILD host's arch, not the target's —
# cross-compiling x86 -> ARM would pick the wrong branch; confirm intent.
unix: {
# x86 build host
contains(QMAKE_HOST.arch, x86_64) {
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
}
# ARM build host
contains(QMAKE_HOST.arch, arm) {
INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface
INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK
#INCLUDEPATH+=/usr/local/include/human
}
}
# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0
#INCLUDEPATH+=/usr/local/include/opencv4
#INCLUDEPATH+=/usr/local/include/hyperface
#INCLUDEPATH+=/usr/local/include/hyper
#QMAKE_LIBDIR += /usr/local/lib
#OPENCV_LIBS=-L /usr/local/lib -lopencv_core -lopencv_highgui -lopencv_videoio -lopencv_imgproc -lHyperFace -lopencv_imgcodecs
# NOTE(review): comment lines inside the LIBS continuation below (the
# "# -lssl \" lines) can terminate the continuation in some qmake versions,
# silently dropping the entries after them — verify -lc/-lXNetSDK are linked.
LIBS += -lopencv_core \
-lopencv_highgui \
-lopencv_videoio \
-lopencv_imgproc \
-lopencv_video \
-lHyperFace \
-lopencv_imgcodecs \
-lhyperlpr3 \
-lopencv_objdetect \
-lsohuman \
# -lssl \
# -lcrypto \
-lc \
-lXNetSDK
#-lz
SOURCES += \
Common.cpp \
FaceReconition.cpp \
LogHandler.cpp \
main.cpp \
mainwindow.cpp \
LicensePlateRecognition.cpp \
MediaFaceImage.cpp \
RecognizedInfo.cpp \
Httpclient.cpp \
HttpService.cpp \
TaskRunnable.cpp \
CameraHandle.cpp \
ParkingSpaceInfo.cpp \
HumanDetection.cpp
HEADERS += \
Common.h \
FaceRecognition.h \
LogHandle.h \
mainwindow.h \
LicensePlateRecognition.h \
MediaFaceImage.h \
RecognitionInfo.h \
HttpClient.h \
HttpService.h \
VidesData.h \
TaskRunnable.h \
CameraHandle.h \
ParkingSpaceInfo.h \
HumanDetection.h
#FORMS += \
# mainwindow.ui
# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target
RESOURCES += \
BG.qrc
# qmake project for the headless/console build (GAMERAVIDEONOUI).
QT += core gui network multimedia sql concurrent
CONFIG += c++11 console
CONFIG -= app_bundle
TARGET = GAMERAVIDEONOUI
TEMPLATE = app
# The following define makes your compiler emit warnings if you use
# any Qt feature that has been marked deprecated (the exact warnings
# depend on your compiler). Please consult the documentation of the
# deprecated API in order to know how to port your code away from it.
DEFINES += QT_DEPRECATED_WARNINGS
# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0
# Library search paths per host architecture (x86 dev box vs ARM target).
unix:contains(QMAKE_HOST.arch, x86_64) {
QMAKE_LIBDIR += /home/mark/Public/x86_opencv/lib
}
unix:contains(QMAKE_HOST.arch, arm) {
QMAKE_LIBDIR += /usr/local/lib
}
# Header search paths per host architecture.
unix: {
# x86 build host
contains(QMAKE_HOST.arch, x86_64) {
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/opencv4
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyperface
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/hyper
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/XNetSDK
INCLUDEPATH+=/home/mark/Public/x86_opencv/include/human
}
# ARM build host
contains(QMAKE_HOST.arch, arm) {
INCLUDEPATH+=/usr/local/include/opencv4
INCLUDEPATH+=/usr/local/include/hyperface
INCLUDEPATH+=/usr/local/include/hyper
INCLUDEPATH+=/usr/local/include/XNetSDK
INCLUDEPATH+=/usr/local/include/human
}
}
# You can also make your code fail to compile if it uses deprecated APIs.
# In order to do so, uncomment the following line.
# You can also select to disable deprecated APIs only up to a certain version of Qt.
#DEFINES += QT_DISABLE_DEPRECATED_BEFORE=0x060000 # disables all the APIs deprecated before Qt 6.0.0
#INCLUDEPATH+=/usr/local/include/opencv4
#INCLUDEPATH+=/usr/local/include/hyperface
#INCLUDEPATH+=/usr/local/include/hyper
#QMAKE_LIBDIR += /usr/local/lib
#OPENCV_LIBS=-L /usr/local/lib -lopencv_core -lopencv_highgui -lopencv_videoio -lopencv_imgproc -lHyperFace -lopencv_imgcodecs
# NOTE(review): comment lines inside the LIBS continuation below (the
# "# -lssl \" / "#-lc \" lines) can terminate the continuation in some qmake
# versions, silently dropping -lXNetSDK — verify the final link line.
LIBS += -lopencv_core \
-lopencv_highgui \
-lopencv_videoio \
-lopencv_imgproc \
-lopencv_video \
-lHyperFace \
-lopencv_imgcodecs \
-lhyperlpr3 \
-lopencv_objdetect \
-lsohuman \
# -lssl \
# -lcrypto \
#-lc \
-lXNetSDK
#-lz
HEADERS += \
Common.h \
FaceRecognition.h \
LogHandle.h \
mainwindow.h \
LicensePlateRecognition.h \
MediaFaceImage.h \
RecognitionInfo.h \
HttpClient.h \
HttpService.h \
VidesData.h \
TaskRunnable.h \
CameraHandle.h \
ParkingSpaceInfo.h \
HumanDetection.h
SOURCES += \
Common.cpp \
FaceReconition.cpp \
LogHandler.cpp \
main.cpp \
mainwindow.cpp \
LicensePlateRecognition.cpp \
MediaFaceImage.cpp \
RecognizedInfo.cpp \
Httpclient.cpp \
HttpService.cpp \
TaskRunnable.cpp \
CameraHandle.cpp \
ParkingSpaceInfo.cpp \
HumanDetection.cpp
# Default rules for deployment.
qnx: target.path = /tmp/$${TARGET}/bin
else: unix:!android: target.path = /opt/$${TARGET}/bin
!isEmpty(target.path): INSTALLS += target
; Runtime configuration for the headless camera/video box (read via QSettings, INI format).
[devices]
; Video/RTSP source URL(s) — presumably comma-separated like the lists below; verify against the reader.
rtps_urls=/home/mark/Public/build_gamera_videos/VID_20231122_145207.mp4
frame_counter=127
; Device login endpoint(s), host:port.
camera_logins=192.168.10.183:34567
username=admin
password=admin
sz_config_path=/home/mark/Public/build_gamera_videos/szConfigPath
sz_temp_path=/home/mark/Public/build_gamera_videos/szTempPath
[timer]
; Timer intervals — presumably milliseconds (QTimer convention); confirm in the code that starts them.
checkofflinetimer=10000000
delete_logfile_timer=86400000
delete_mkvflie_timer=100000
[faceFaceRecognitions]
; Registered face images and their display names: two parallel comma-separated lists.
images=/home/mark/Public/build_gamera_videos/images/lisi.jpg,/home/mark/Public/build_gamera_videos/images/guanyu.jpg,/home/mark/Public/build_gamera_videos/images/zhangfei.jpg
names=李四,关羽,张飞
[licensePlateRecognition]
; NOTE(review): value ends in "/s" — looks truncated; verify the real model directory path.
model_paths=/home/mark/Public/build_gamera_videos/lprv3u_models/s
kun.jpg

212 KB

#include <QCoreApplication>
#include <opencv2/opencv.hpp>
#include "mainwindow.h"
int main(int argc, char *argv[])
{
QCoreApplication a(argc, argv);
qRegisterMetaType<cv::Mat>("cv::Mat");
// 设置环境变量
QString value = "rtsp_transport;udp";
qputenv("OPENCV_FFMPEG_CAPTURE_OPTIONS", value.toUtf8());
qputenv("QT_LOGGING_RULES", "qt.network.ssl=true");
MainWindow w;
return a.exec();
}
This diff is collapsed. Click to expand it.
#ifndef MAINWINDOW_H
#define MAINWINDOW_H
#include "Common.h"
#include "FaceRecognition.h"
#include "LicensePlateRecognition.h"
#include "hyper_lpr_sdk.h"
#include "CameraHandle.h"
#include "HttpService.h"
#include "VidesData.h"
#include "MediaFaceImage.h"
#include "HumanDetection.h"
#include <algorithm>
#include <QString>
#include <QTextCodec>
#include <QObject>
#include <QByteArray>
#include <QSettings>
#include <QTimer>
#include <QSemaphore>
#include <QDebug>
#include <QResource>
#include <opencv2/opencv.hpp>
#include <QRandomGenerator>
#include <QTcpServer>
#include <QTcpSocket>
//namespace Ui { class MainWindow; }
// Headless application controller (no UI, despite the legacy name):
// owns camera handles, housekeeping timers, a TCP server, and the
// face-image bookkeeping synchronized with the cloud service.
class MainWindow : public QObject
{
Q_OBJECT
public:
explicit MainWindow();
// Read/create the output directories used for recorded media.
void initVideoOutPath();
void setVideoPath(int flag, const QString& path);
void createDirectory(int flag,const QString& dirName, const QString& successMsg, const QString& failureMsg);
void initFaceFaceRecognition();
// Create a CameraHandle for one device described by `parameter`, bound to `areas`.
void initCameras(vides_data::cameraParameters &parameter,const std::list<vides_data::responseArea>&areas);
// Global access point to the single instance (set in the constructor — TODO confirm).
static MainWindow * sp_this;
// Helpers for answering requests received on the embedded TCP server.
void sendJsonResponse(QTcpSocket* socket, int code, const QString& data, const QString& msg);
void sendEmptyResponse(QTcpSocket* socket);
void sendNotFoundResponse(QTcpSocket* socket);
// Synchronize the local face-image set with the cloud service at `httpurl`.
void updateLocalFace(const QString &httpurl);
void removeImageFiles(QString id);
void modifyImagesAndNames(QString &modId);
void findLocalSerialNumber(QString &serialNumber);
// Push device-side configuration (recording/encode/GB28181) to a camera.
void initDevConfigSyn(CameraHandle *cameraHandle);
void iniRecordingToString(QString &recorJson);
void iniEncodeToString(QString &enCodeJson);
// Drop the handle of a camera that went offline.
void clearOfflineCameraHandle(QString sDevId, int nDevPort);
bool iniWorkSpVMn(vides_data::responseGb28181 *gb28181,QString &workSpWMn,QString &sn);
bool isDeviceInList(const QString& deviceId, const std::list<vides_data::responseDeviceStatus>& devices);
// Filter helper: remove cloud-registered devices that are not present locally.
void deleteCloudNotCamer (const std::map<QString,vides_data::localDeviceStatus*>& localDevices,
const std::list<vides_data::responseDeviceStatus>& devices);
~MainWindow();
signals:
// Emitted to ask camera owners to shut down the handle for sDevId:nDevPort.
void shutdownSignals(QString sDevId, int nDevPort);
private slots:
void startCamera(const QString &httpurl);
// Periodic housekeeping (driven by the timers below).
void deleteLogFile();
void clearHandle(QString sDevId, int nDevPort);
void deleteMkvFileTimer();
// Accepts new connections on `server`.
void handleMatNewConnection();
private:
//Ui::MainWindow *ui;
QSettings *qSetting;
QTimer *deleteLogFileTimer;
QTimer *deleteFrameFileTimer;
QTimer*dePermissionSynTimer;
QTcpServer server;
// local id -> image file path
std::map<QString,QString>localImageMap;
// cloud id -> OSS path
std::map<QString,QString>cloudImageMap;
QString modelPaths;
// device key -> camera handle for every active camera.
std::map<QString,CameraHandle*>faceDetectionParkingPushs;
};
#endif // MAINWINDOW_H
<?xml version="1.0" encoding="UTF-8"?>
<ui version="4.0">
<class>MainWindow</class>
<widget class="QMainWindow" name="MainWindow">
<property name="geometry">
<rect>
<x>0</x>
<y>0</y>
<width>987</width>
<height>600</height>
</rect>
</property>
<property name="windowTitle">
<string>MainWindow</string>
</property>
<widget class="QWidget" name="centralwidget">
<widget class="QWidget" name="layoutWidget">
<property name="geometry">
<rect>
<x>170</x>
<y>120</y>
<width>610</width>
<height>301</height>
</rect>
</property>
<layout class="QVBoxLayout" name="verticalLayout">
<item>
<layout class="QHBoxLayout" name="horizontalLayout">
<item>
<widget class="QLabel" name="label">
<property name="text">
<string>设备名称:</string>
</property>
</widget>
</item>
<item>
<widget class="QComboBox" name="device">
<property name="currentText">
<string/>
</property>
</widget>
</item>
</layout>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_5">
<item>
<widget class="QLabel" name="label_2">
<property name="text">
<string>起始时间:</string>
</property>
</widget>
</item>
<item>
<widget class="QDateTimeEdit" name="startTimer">
<property name="date">
<date>
<year>2024</year>
<month>1</month>
<day>17</day>
</date>
</property>
<property name="time">
<time>
<hour>0</hour>
<minute>0</minute>
<second>0</second>
</time>
</property>
<property name="maximumTime">
<time>
<hour>23</hour>
<minute>59</minute>
<second>59</second>
</time>
</property>
<property name="displayFormat">
<string>yyyy-MM-dd hh:mm:ss</string>
</property>
</widget>
</item>
<item>
<widget class="QLabel" name="label_3">
<property name="text">
<string>截止时间:</string>
</property>
</widget>
</item>
<item>
<widget class="QDateTimeEdit" name="endTimer">
<property name="dateTime">
<datetime>
<hour>10</hour>
<minute>42</minute>
<second>31</second>
<year>2024</year>
<month>1</month>
<day>17</day>
</datetime>
</property>
<property name="date">
<date>
<year>2024</year>
<month>1</month>
<day>17</day>
</date>
</property>
<property name="displayFormat">
<string>yyyy-MM-dd hh:mm:ss</string>
</property>
</widget>
</item>
</layout>
</item>
<item>
<layout class="QHBoxLayout" name="horizontalLayout_7">
<item>
<widget class="QLabel" name="label_4">
<property name="text">
<string>相机名称:</string>
</property>
</widget>
</item>
<item>
<widget class="QLineEdit" name="camera_name"/>
</item>
<item>
<widget class="QPushButton" name="pushButton">
<property name="text">
<string>截取视频</string>
</property>
</widget>
</item>
<item>
<widget class="QLabel" name="label_5">
<property name="text">
<string>视频路径:</string>
</property>
</widget>
</item>
<item>
<widget class="QLineEdit" name="video_path"/>
</item>
</layout>
</item>
<item>
<widget class="QFrame" name="frame">
<property name="frameShape">
<enum>QFrame::StyledPanel</enum>
</property>
<property name="frameShadow">
<enum>QFrame::Raised</enum>
</property>
<layout class="QHBoxLayout" name="horizontalLayout_2">
<item>
<widget class="QTableWidget" name="tableWidget">
<column>
<property name="text">
<string>设备名称</string>
</property>
</column>
<column>
<property name="text">
<string>当前时间</string>
</property>
</column>
<column>
<property name="text">
<string>车牌号</string>
</property>
</column>
</widget>
</item>
</layout>
</widget>
</item>
</layout>
</widget>
</widget>
</widget>
<resources/>
<connections/>
</ui>
mask.png

757 KB

<RCC>
<qresource prefix="/images"/>
</RCC>
// Placeholder header — intentionally empty (include guard only).
// NOTE(review): "XXX_H" looks like a template/stub name; rename or remove
// once the header gains content.
#ifndef XXX_H
#define XXX_H
#endif // XXX_H
yifei.jpg

50.6 KB

Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment