Commit 7bcc5da8 by “liusq”

Optimize camera handling, uniform color configuration, and API request verification

parent 27ffe3f4
......@@ -42,7 +42,7 @@ void AlgorithmTaskManage::initialize(int humanDetectionLen, int licensePlateLen,
}
void AlgorithmTaskManage::initHumanDetectionManage(const QString &modelPaths,
float carShapeConfidence,int &uniformColor) {
float carShapeConfidence,QString &uniformColor) {
for (int i = 0; i < humanDetectionLen; ++i) {
HumanDetection* human=new HumanDetection(modelPaths,carShapeConfidence);
human->setHuManParameter(uniformColor);
......@@ -113,7 +113,7 @@ void AlgorithmTaskManage::releaseResources(const vides_data::DetectionParams& pa
QString modelPath = params.modelPaths;
float humanCarShapeConfidence = params.humanCarShapeConfidence;
int uniformColor = params.uniformColor;
QString uniformColor = params.uniformColor;
std::map<QString, QString> faceMaps = params.faceMaps;
int numberFaces = params.numberFaces;
float faceConfidence = params.faceConfidence;
......
......@@ -26,7 +26,7 @@ public:
void initialize(int humanDetectionLen, int licensePlateLen, int faceLen,bool first,__uint8_t algorithmPermissions );
void initHumanDetectionManage(const QString &modelPaths,
float carShapeConfidence,int &uniformColor);
float carShapeConfidence,QString &uniformColor);
void initLicensePlateManage(const QString &modelPaths,bool is_high,int maxNum,bool useHalf,
float boxThreshold,float nmsThreshold,float recThreshold);
......
......@@ -394,6 +394,10 @@ int CameraHandle::callbackFunction(XSDK_HANDLE hObject, QString &szString) {
{
qInfo() << "OnDevAlarmCallback[Dev:" << hObject << "][Event:" << szString << "]";
}
if(algorithmPermissions ==0x00){
return -1 ;
}
cv::Mat image;
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
qint64 currentTime= QDateTime::currentSecsSinceEpoch();
......@@ -426,6 +430,11 @@ void CameraHandle::sdkDevSnapSyn(XSDK_HANDLE hDevice, int nChannel){
semaphore.release(); // release the semaphore
});
Common & instace= Common::getInstance();
if(algorithmPermissions ==0x00){
return ;
}
cv::Mat image;
MediaFaceImage* mediaFaceImage= MediaFaceImage::getInstance();
qint64 currentTime= QDateTime::currentSecsSinceEpoch();
......@@ -511,7 +520,7 @@ void CameraHandle::checkAndUpdateCurrentPlate(ParkingSpaceInfo*park,const cv::Ma
}else {
// No vehicle, or the vehicle is inside the parking area; remove it from the queue
park->removeNoQueue();
qDebug()<<QString("SN(%1): no exit::%2").arg(sSn).arg(car_size);
qInfo()<<QString("SN(%1): no exit::%2").arg(sSn).arg(car_size);
}
}else{
// Current slot is not empty: a new car enters and the old car exits
......
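The two new guards in CameraHandle above return early when algorithmPermissions is 0x00, i.e. when no analysis algorithm is enabled for the device, so no frame is fetched or analyzed. A minimal sketch of that kind of bitmask gate; the permission constants below are illustrative assumptions, not values taken from this repository:

#include <cstdint>

// Hypothetical permission bits; the real flag values are defined elsewhere in the project.
const uint8_t PERM_NONE         = 0x00;
const uint8_t PERM_HUMAN_DETECT = 0x01;
const uint8_t PERM_PLATE_RECOG  = 0x02;

// Mirrors the guard added to callbackFunction/sdkDevSnapSyn: if no bit is set,
// frame capture and analysis are skipped entirely.
bool shouldRunAlgorithms(uint8_t algorithmPermissions) {
    return algorithmPermissions != PERM_NONE;
}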
#include "HttpService.h"
vides_data::responseStsCredentials HttpService::stsCredentials;
QString HttpService::sing_key;
HttpService::HttpService() {
......@@ -45,6 +46,7 @@ vides_data::response* HttpService::httpPostDeviceStatus(vides_data::requestDevic
vides_data::response *resp = new vides_data::response();
QNetworkRequest request;
assembleSingHeaders(request);
request.setUrl(QUrl(httpUrl));
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -78,6 +80,7 @@ vides_data::response* HttpService::httpPostRecord(int id,int recongnition_type,Q
vides_data::response *resp=new vides_data::response();
QNetworkRequest request;
assembleSingHeaders(request);
request.setUrl(QUrl(httpUrl));
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -99,6 +102,15 @@ vides_data::response* HttpService::httpPostRecord(int id,int recongnition_type,Q
void HttpService::setHttpUrl(const QString &httpUrl){
this->httpUrl=httpUrl;
}
void HttpService::setSingKey(const QString &key) {
sing_key = key;
}
QString HttpService::getSingKey() {
return sing_key;
}
vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_data::responseDeviceData&responseData) {
httpUrl.append("/api/v1.0/device/all");
vides_data::response *resp=new vides_data::response();
......@@ -106,6 +118,7 @@ vides_data::response *HttpService::httpFindCameras(QString &serialNumber,vides_d
query.addQueryItem("sn",serialNumber);
query.addQueryItem("new_token",QString::number(1));
QNetworkRequest request;
assembleSingHeaders(request);
QUrl url(httpUrl);
url.setQuery(query);
request.setUrl(url);
......@@ -222,6 +235,7 @@ vides_data::response *HttpService::httpLicensePlateRecognition(vides_data::reque
QByteArray bytearr= doc.toJson(QJsonDocument::Compact);
vides_data::response *resp=new vides_data::response();
assembleSingHeaders(request);
request.setUrl(QUrl(httpUrl));
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -264,6 +278,7 @@ vides_data::response* HttpService::httpFindFaceReconition(QString &serialNumber,
query.addQueryItem("sn",serialNumber);
QUrl url(httpUrl);
url.setQuery(query);
assembleSingHeaders(request);
request.setUrl(url);
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -305,6 +320,7 @@ vides_data::response *HttpService::httpPostUniforms(QByteArray &img,QString &id,
vides_data::response *resp=new vides_data::response();
QNetworkRequest request;
assembleSingHeaders(request);
request.setUrl(QUrl(httpUrl));
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -346,6 +362,7 @@ vides_data::response *HttpService::httpPostFacePopulation(QByteArray &img,int &h
vides_data::response *resp=new vides_data::response();
QNetworkRequest request;
assembleSingHeaders(request);
request.setUrl(QUrl(httpUrl));
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -394,6 +411,7 @@ vides_data::response *HttpService::httpPostFaceReconition(vides_data::requestFac
vides_data::response *resp=new vides_data::response();
QNetworkRequest request;
assembleSingHeaders(request);
request.setUrl(QUrl(httpUrl));
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
......@@ -419,6 +437,7 @@ vides_data::response*HttpService::httpFindGb28181Config(QString &serialNumber){
QNetworkRequest request;
QUrl url(httpUrl);
url.setQuery(query);
assembleSingHeaders(request);
request.setUrl(url);
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -462,6 +481,7 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
QNetworkRequest request;
QUrl url(httpUrl);
url.setQuery(query);
assembleSingHeaders(request);
request.setUrl(url);
request.setRawHeader(vides_data::HEADER_TYPE_KAY, vides_data::HEADER_TYPE_VALUE);
QMutexLocker locker(&m_httpClientMutex);
......@@ -533,7 +553,7 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
// Parse uniformConfig
QJsonObject uniformConfigObj = dataObj["uniformConfig"].toObject();
config.uniformConfig.isOn = uniformConfigObj["isOn"].toBool();
config.uniformConfig.uniformColor = uniformConfigObj["uniformColor"].toInt();
config.uniformConfig.uniformColor = uniformConfigObj["uniformColor"].toString();
config.uniformConfig.humanDetectionLen = uniformConfigObj["humanDetectionLen"].toInt();
config.uniformConfig.updateAt = uniformConfigObj["updateAt"].toVariant().toULongLong();
config.uniformConfig.carShapeConfidence = uniformConfigObj["carShapeConfidence"].toVariant().toFloat();
......@@ -542,7 +562,6 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
config.humanConfig.isOn=humanConfigObj["isOn"].toBool();
config.humanConfig.updateAt = humanConfigObj["updateAt"].toVariant().toULongLong();
config.humanConfig.faceFrequency = humanConfigObj["faceFrequency"].toVariant().toUInt();
// Parse devicesConfig
QJsonObject devicesConfigObj = dataObj["camera"].toObject();
config.camera.password = devicesConfigObj["password"].toString();
......@@ -575,6 +594,23 @@ vides_data::response *HttpService::httpDeviceConfig(const QString &serialNumber,
}
void HttpService::assembleSingHeaders(QNetworkRequest &reques){
QString ts = QString::number(QDateTime::currentMSecsSinceEpoch() / 1000);
// Compute the signature
QCryptographicHash hash(QCryptographicHash::Md5);
hash.addData(HttpService::getSingKey().toUtf8());
hash.addData(ts.toUtf8());
QString sign = hash.result().toHex();
// Set the header fields
QMap<QString, QVariant> headers;
headers.insert("ts", ts);
headers.insert("sign", sign);
for (auto it = headers.begin(); it != headers.end(); ++it) {
reques.setRawHeader(it.key().toLatin1(), it.value().toString().toLatin1());
}
}
vides_data::response*HttpService::httpFindStream(QString &serialNumber){
httpUrl.append("/api/v1.0/stream");
......
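For reference, the signature assembled above is the hex MD5 digest of the signing key concatenated with the Unix timestamp in seconds, sent as the ts and sign headers. A minimal standalone sketch that reproduces the same pair, assuming the same key is configured on both client and server:

#include <QCryptographicHash>
#include <QDateTime>
#include <QString>

// Recompute the signature the same way assembleSingHeaders does:
// sign = hex(MD5(sign_key + ts)), where ts is the Unix time in seconds.
QString computeSign(const QString &signKey, const QString &ts) {
    QCryptographicHash hash(QCryptographicHash::Md5);
    hash.addData(signKey.toUtf8());
    hash.addData(ts.toUtf8());
    return hash.result().toHex();
}

// Illustrative usage:
// QString ts = QString::number(QDateTime::currentMSecsSinceEpoch() / 1000);
// QString sign = computeSign(HttpService::getSingKey(), ts);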
......@@ -49,7 +49,13 @@ public:
vides_data::response *httpUploadFile(const QString &filePath,QString& accessKeyId,QString& accessKeySecret,
QString & bucketName,QString &securityToken);
void setHttpUrl(const QString & httpUrl);
static void setSingKey(const QString &key);
static QString getSingKey();
// Assemble the signature verification headers
void assembleSingHeaders(QNetworkRequest &reques);
vides_data::response *httpDeviceConfig(const QString &serialNumber,vides_data::responseConfig &config);
......@@ -62,6 +68,7 @@ private:
HttpClient m_httpClient;
QMutex m_httpClientMutex;
static QString sing_key;
};
#endif // HTTPSERVICE_H
......@@ -72,7 +72,7 @@ void HumanDetection::draw_human_on_image(const cv::Mat& image, const TCV_ObjectL
}
}
void HumanDetection::setHuManParameter(int &uniformColor){
void HumanDetection::setHuManParameter(QString &uniformColor){
this->uniformColor=uniformColor;
}
......@@ -98,23 +98,38 @@ int HumanDetection::findHuManCar(const cv::Mat &source, int res,std::map<int,int
int num = 0;
if (res == 0x00 || res == 0x02) {
QStringList colorList = uniformColor.split(',');
num = TCV_HumanDetectorGetNumOfHuman(detector);
qInfo() << QString("SN(%1): human count detected: %2").arg(sSn).arg(num);
if (num == 0) return num; // no pedestrians detected, return early
std::vector<TCV_ObjectLocation> results(num);
TCV_HumanDetectorGetHumanLocation(detector, results.data(), num);
if(res==0x02 && colorList.size()==0){
reMap[0x02] = 0; // number of pedestrians without a uniform
reMap[0x00] = num; // total number of qualifying pedestrians
num = 0;
return num;
}
int count_no_uniform = 0; // number of pedestrians without a uniform
int count_all = 0; // total number of qualifying pedestrians
std::vector<TCV_ObjectLocation> results(num);
TCV_HumanDetectorGetHumanLocation(detector, results.data(), num);
QSet<int> uniformNumbers;
for (const QString &numStr : colorList) {
bool ok;
int num = numStr.toInt(&ok);
if (ok) {
uniformNumbers.insert(num);
}
}
for (const auto &person : results) {
int tenPlace = uniformColor / 10; // tens digit
int onePlace = uniformColor % 10; // ones digit
if (std::abs(person.y2 - person.y1) >= heightReference) {
++count_all;
// uniform check
if(person.uniform != tenPlace && person.uniform != onePlace){
if(!uniformNumbers.contains(person.uniform)){
vides_data::ParkingArea area;
area.topLeftCornerX=person.x1;
area.topLeftCornerY=person.y1;
......
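The uniform filter now takes a comma-separated QString of color codes (parsed into a QSet<int>) instead of packing two codes into the tens and ones digits of an int. A minimal sketch of just the parsing step, using "1,3" as an assumed example value:

#include <QSet>
#include <QString>
#include <QStringList>

// Parse a comma-separated color-code string, e.g. "1,3", into a set of ints.
// Entries that fail toInt() are skipped, mirroring the ok-check in findHuManCar.
QSet<int> parseUniformColors(const QString &uniformColor) {
    QSet<int> codes;
    const QStringList parts = uniformColor.split(',');
    for (const QString &part : parts) {
        bool ok = false;
        int code = part.toInt(&ok);
        if (ok) {
            codes.insert(code);
        }
    }
    return codes;
}
// A person is then flagged as "no uniform" when !codes.contains(person.uniform).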
......@@ -19,7 +19,7 @@ public:
int findHuManCar(const cv::Mat &source,int res,std::map<int,int>&reMap,QString &sSn,
float &heightReference, std::vector<vides_data::ParkingArea> &currentPlate);
void setHuManParameter(int &uniformColor);
void setHuManParameter(QString &uniformColor);
void draw_human_on_image(const cv::Mat& image, const TCV_ObjectLocation* boxes, int size);
......@@ -27,7 +27,7 @@ private:
// height reference
float heightReference;
int uniformColor;
QString uniformColor;
TCV_HumanDetector *detector;
......
......@@ -289,7 +289,7 @@ struct LicensePlateConfig {
struct UniformConfig {
bool isOn;
int uniformColor;
QString uniformColor;
int humanDetectionLen;
float carShapeConfidence;
quint64 updateAt;
......@@ -312,6 +312,8 @@ struct MqttConfig {
quint64 updateAt;
};
struct responseConfig {
MainFormat mainFormat;
ExtraFormat extraFormat;
......@@ -343,7 +345,7 @@ struct DetectionParams {
int newFaceLen;
QString modelPaths;
float humanCarShapeConfidence;
int uniformColor;
QString uniformColor;
std::map<QString, QString> faceMaps;
int numberFaces;
float faceConfidence;
......
......@@ -59,7 +59,7 @@ MainWindow::MainWindow():isResetting(false)
initFaceFaceRecognition();
int uniformColor=config.uniformConfig.uniformColor;
QString uniformColor=config.uniformConfig.uniformColor;
int humanDetectionLen=config.uniformConfig.humanDetectionLen;
int licensePlateLen=config.licensePlateConfig.licensePlateLen;
......@@ -102,7 +102,9 @@ MainWindow::MainWindow():isResetting(false)
},Qt::QueuedConnection);
this->startCamera(httpurl);
QString sign_Key = qSetting->value("cloudservice/signKey","sign_key").toString();
HttpService::setSingKey(sign_Key);
float confidence=config.faceConfig.confidence;
int faceNumbers=config.faceConfig.faceNumbers;
......
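The signing key is read once at startup from QSettings (cloudservice/signKey, falling back to "sign_key") and stored in the static member via HttpService::setSingKey, so every request later signed by assembleSingHeaders uses the same key. A minimal sketch of that wiring under the same assumptions; loadSignKey is an illustrative helper name, not part of the source:

#include <QSettings>
#include "HttpService.h"

// Load the shared signing key once; HttpService::assembleSingHeaders reads it
// back through the static getSingKey() when building the ts/sign headers.
void loadSignKey(QSettings *qSetting) {
    QString signKey = qSetting->value("cloudservice/signKey", "sign_key").toString();
    HttpService::setSingKey(signKey);
}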