GrabBag/App/LapWeld/LapWeldApp/Presenter/Src/LapWeldPresenter.cpp

1414 lines
50 KiB
C++
Raw Normal View History

2025-09-14 14:51:38 +08:00
#include "LapWeldPresenter.h"
2025-06-08 12:48:04 +08:00
#include "VrError.h"
#include "VrLog.h"
2025-06-08 12:48:04 +08:00
#include <QtCore/QCoreApplication>
#include <QtCore/QFileInfo>
#include <QtCore/QDir>
#include <QtCore/QString>
#include <QtCore/QStandardPaths>
#include <QtCore/QFile>
#include <cmath>
#include <algorithm>
#include <QImage>
#include <QThread>
2025-09-14 14:51:38 +08:00
#include <atomic>
#include "Version.h"
#include "VrTimeUtils.h"
#include "VrDateUtils.h"
2025-09-14 14:51:38 +08:00
#include "SX_lapWeldDetection_Export.h"
#include "SG_baseDataType.h"
#include "VrConvert.h"
#include "PointCloudImageUtils.h"
2025-06-08 12:48:04 +08:00
2025-09-14 14:51:38 +08:00
LapWeldPresenter::LapWeldPresenter()
    : m_vrConfig(nullptr)
    , m_pStatus(nullptr)
    , m_pDetectPresenter(nullptr)
    , m_pTCPServer(nullptr)
    , m_bCameraConnected(false)
    , m_bTCPConnected(false)
    , m_currentWorkStatus(WorkStatus::Error)
{
    // Start from an empty detection cache and register this object as the
    // handler for configuration commands arriving via the config monitor.
    m_detectionDataCache.clear();
    m_configMonitor.SetCommandHandler(this);
}
2025-09-14 14:51:38 +08:00
LapWeldPresenter::~LapWeldPresenter()
{
    // Shut down the shared-memory config monitor first.
    m_configMonitor.Stop();

    // Signal the algorithm thread to exit and wait for it if it is joinable.
    m_bAlgoDetectThreadRunning = false;
    m_algoDetectCondition.notify_all();
    if (m_algoDetectThread.joinable()) {
        m_algoDetectThread.join();
    }

    // Drop all cached detection line data.
    _ClearDetectionDataCache();

    // Tear down the TCP server.
    if (m_pTCPServer) {
        m_pTCPServer->Deinitialize();
        delete m_pTCPServer;
        m_pTCPServer = nullptr;
    }

    // Close and release every camera device.
    for (auto& entry : m_vrEyeDeviceList) {
        if (entry.second != nullptr) {
            entry.second->CloseDevice();
            delete entry.second;
            entry.second = nullptr;
        }
    }
    m_vrEyeDeviceList.clear();

    // Release the detection presenter.
    if (m_pDetectPresenter) {
        delete m_pDetectPresenter;
        m_pDetectPresenter = nullptr;
    }

    // Release the configuration object.
    if (m_vrConfig) {
        delete m_vrConfig;
        m_vrConfig = nullptr;
    }
}
2025-09-14 14:51:38 +08:00
int LapWeldPresenter::Init()
2025-06-08 12:48:04 +08:00
{
2025-09-14 14:51:38 +08:00
LOG_DEBUG("Start APP Version: %s\n", LAPWELD_FULL_VERSION_STRING);
// 初始化连接状态
m_bCameraConnected = false;
m_currentWorkStatus = WorkStatus::InitIng;
m_pStatus->OnWorkStatusChanged(m_currentWorkStatus);
2025-07-23 01:35:14 +08:00
m_pDetectPresenter = new DetectPresenter();
// 创建 VrConfig 实例
if (!IVrConfig::CreateInstance(&m_vrConfig) || !m_vrConfig) {
LOG_ERROR("Failed to create VrConfig instance\n");
m_pStatus->OnStatusUpdate("配置实例创建失败");
return ERR_CODE(DEV_CONFIG_ERR);
}
// 设置配置改变通知回调
if (m_vrConfig) {
m_vrConfig->SetConfigChangeNotify(this);
}
2025-06-08 12:48:04 +08:00
// 获取配置文件路径
QString configPath = PathManager::GetInstance().GetConfigFilePath();
// 加载配置文件
m_configResult = m_vrConfig->LoadConfig(configPath.toStdString());
m_projectType = m_configResult.projectType;
int nRet = SUCCESS;
2025-06-08 12:48:04 +08:00
// 启动共享内存监控
if (!m_configMonitor.Start()) {
LOG_WARNING("Failed to start config monitor\n");
// 不返回错误,允许应用继续运行
}
// 初始化算法参数
nRet = InitAlgorithmParams();
if (nRet != 0) {
m_pStatus->OnStatusUpdate("算法参数初始化失败");
LOG_ERROR("Algorithm parameters initialization failed with error: %d\n", nRet);
} else {
m_pStatus->OnStatusUpdate("算法参数初始化成功");
LOG_INFO("Algorithm parameters initialization successful\n");
}
InitCamera(m_configResult.cameraList);
LOG_INFO("Camera initialization completed. Connected cameras: %zu, default camera index: %d\n",
m_vrEyeDeviceList.size(), m_currentCameraIndex);
// 初始化TCP服务器
nRet = InitTCPServer();
2025-07-23 01:35:14 +08:00
if (nRet != 0) {
m_pStatus->OnStatusUpdate("TCP服务器初始化失败");
m_bTCPConnected = false;
2025-07-23 01:35:14 +08:00
} else {
m_pStatus->OnStatusUpdate("TCP服务器初始化成功");
2025-07-23 01:35:14 +08:00
}
m_bAlgoDetectThreadRunning = true;
2025-09-14 14:51:38 +08:00
m_algoDetectThread = std::thread(&LapWeldPresenter::_AlgoDetectThread, this);
m_algoDetectThread.detach();
2025-06-08 12:48:04 +08:00
m_pStatus->OnStatusUpdate("设备初始化完成");
CheckAndUpdateWorkStatus();
QString str = QString("%1 配置初始化成功").arg(ProjectTypeToString(m_configResult.projectType).c_str());
m_pStatus->OnStatusUpdate(str.toStdString());
LOG_INFO("Configuration initialized successfully\n");
2025-06-08 12:48:04 +08:00
return SUCCESS;
}
// TCP server initialization
/**
 * @brief Creates (once) and initializes the TCP command server, wiring the
 *        connection-state and detection-trigger callbacks back to this object.
 * @return SUCCESS on success, otherwise the error code from Initialize().
 */
int LapWeldPresenter::InitTCPServer()
{
    // Single listening port; keeping the bind and the log message in sync
    // (previously the literal 5020 was duplicated).
    static constexpr int kTcpServerPort = 5020;

    LOG_DEBUG("Initializing TCP server\n");
    if (m_pTCPServer == nullptr) {
        m_pTCPServer = new TCPServerProtocol();
    }
    int result = m_pTCPServer->Initialize(kTcpServerPort);
    if (result != 0) {
        LOG_ERROR("Failed to initialize TCP server: %d\n", result);
        return result;
    }
    // Connection-state callback.
    m_pTCPServer->SetConnectionCallback([this](bool connected) {
        this->OnTCPConnectionChanged(connected);
    });
    // Detection-trigger callback.
    m_pTCPServer->SetDetectionTriggerCallback([this](bool startWork, int cameraIndex, qint64 timestamp) {
        return this->OnTCPDetectionTrigger(startWork, cameraIndex, timestamp);
    });
    LOG_INFO("TCP server initialized successfully on port %d\n", kTcpServerPort);
    // NOTE(review): this flag is raised as soon as the server is listening,
    // before any client has connected — confirm that is the intended meaning.
    m_bTCPConnected = true;
    return SUCCESS;
}
// 相机协议相关方法
2025-09-14 14:51:38 +08:00
int LapWeldPresenter::InitCamera(std::vector<DeviceInfo>& cameraList)
{
// 通知UI相机个数
int cameraCount = cameraList.size();
// 初始化相机列表,预分配空间
m_vrEyeDeviceList.resize(cameraCount, std::make_pair("", nullptr));
for(int i = 0; i < cameraCount; i++)
{
m_vrEyeDeviceList[i] = std::make_pair(cameraList[i].name, nullptr);
}
m_pStatus->OnCameraCountChanged(cameraCount);
if(cameraCount > 0){
if (cameraCount >= 1) {
// 尝试打开相机
int nRet = _OpenDevice(1, cameraList[0].name.c_str(), cameraList[0].ip.c_str(), m_projectType);
m_pStatus->OnCamera1StatusChanged(nRet == SUCCESS);
m_bCameraConnected = (nRet == SUCCESS);
}
if (cameraCount >= 2) {
// 尝试打开相机
int nRet = _OpenDevice(2, cameraList[1].name.c_str(), cameraList[1].ip.c_str(), m_projectType);
m_pStatus->OnCamera2StatusChanged(nRet == SUCCESS);
m_bCameraConnected = (nRet == SUCCESS);
}
} else {
m_vrEyeDeviceList.resize(1, std::make_pair("", nullptr));
_OpenDevice(1, "相机", nullptr, m_projectType);
}
// 设置默认相机索引为第一个连接的相机
m_currentCameraIndex = 1; // 默认从1开始
for (int i = 0; i < static_cast<int>(m_vrEyeDeviceList.size()); i++) {
if (m_vrEyeDeviceList[i].second != nullptr) {
m_currentCameraIndex = i + 1; // 找到第一个连接的相机
break;
}
}
return SUCCESS;
}
// 初始化算法参数
2025-09-14 14:51:38 +08:00
int LapWeldPresenter::InitAlgorithmParams()
{
LOG_DEBUG("Start initializing algorithm parameters\n");
QString exePath = QCoreApplication::applicationFilePath();
2025-07-23 01:35:14 +08:00
// 清空现有的手眼标定矩阵列表
m_clibMatrixList.clear();
// 获取手眼标定文件路径并确保文件存在
QString clibPath = PathManager::GetInstance().GetCalibrationFilePath();
2025-07-23 01:35:14 +08:00
LOG_INFO("Loading hand-eye matrices from: %s\n", clibPath.toStdString().c_str());
// 读取存在的矩阵数量
int nExistMatrixNum = CVrConvert::GetClibMatrixCount(clibPath.toStdString().c_str());
LOG_INFO("Found %d hand-eye calibration matrices\n", nExistMatrixNum);
// 循环加载每个矩阵
for(int matrixIndex = 0; matrixIndex < nExistMatrixNum; matrixIndex++)
{
2025-07-23 01:35:14 +08:00
// 构造矩阵标识符
char matrixIdent[64];
#ifdef _WIN32
sprintf_s(matrixIdent, "CalibMatrixInfo_%d", matrixIndex);
#else
sprintf(matrixIdent, "CalibMatrixInfo_%d", matrixIndex);
#endif
// 创建新的标定矩阵结构
CalibMatrix calibMatrix;
// 初始化为单位矩阵
double initClibMatrix[16] = {
1.0, 0.0, 0.0, 0.0, // 第一行
0.0, 1.0, 0.0, 0.0, // 第二行
0.0, 0.0, 1.0, 0.0, // 第三行
0.0, 0.0, 0.0, 1.0 // 第四行
};
// 加载矩阵数据
bool loadSuccess = CVrConvert::LoadClibMatrix(clibPath.toStdString().c_str(), matrixIdent, "dCalibMatrix", calibMatrix.clibMatrix);
if(loadSuccess)
{
m_clibMatrixList.push_back(calibMatrix);
LOG_INFO("Successfully loaded matrix %d\n", matrixIndex);
// 输出矩阵内容
QString clibMatrixStr;
LOG_INFO("Matrix %d content:\n", matrixIndex);
for (int i = 0; i < 4; ++i) {
clibMatrixStr.clear();
for (int j = 0; j < 4; ++j) {
clibMatrixStr += QString::asprintf("%8.4f ", calibMatrix.clibMatrix[i * 4 + j]);
}
LOG_INFO(" %s\n", clibMatrixStr.toStdString().c_str());
}
}
else
{
LOG_WARNING("Failed to load matrix %d, using identity matrix\n", matrixIndex);
// 如果加载失败,使用单位矩阵
memcpy(calibMatrix.clibMatrix, initClibMatrix, sizeof(initClibMatrix));
m_clibMatrixList.push_back(calibMatrix);
2025-07-15 21:06:09 +08:00
}
2025-07-23 01:35:14 +08:00
}
2025-07-23 01:35:14 +08:00
LOG_INFO("Total loaded %zu hand-eye calibration matrices\n", m_clibMatrixList.size());
// 获取配置文件路径
QString configPath = PathManager::GetInstance().GetConfigFilePath();
2025-07-15 21:06:09 +08:00
LOG_INFO("Loading config: %s\n", configPath.toStdString().c_str());
// 读取配置文件
ConfigResult configResult = m_vrConfig->LoadConfig(configPath.toStdString());
const VrAlgorithmParams& xmlParams = configResult.algorithmParams;
// 保存调试参数
m_debugParam = configResult.debugParam;
2025-09-14 14:51:38 +08:00
LOG_INFO("Loaded XML params - LapWeld: lapHeight=%.1f, weldMinLen=%.1f, weldRefPoints=%d\n",
xmlParams.lapWeldParam.lapHeight, xmlParams.lapWeldParam.weldMinLen, xmlParams.lapWeldParam.weldRefPoints);
LOG_INFO("Loaded XML params - Filter: continuityTh=%.1f, outlierTh=%d\n",
xmlParams.filterParam.continuityTh, xmlParams.filterParam.outlierTh);
// 直接使用配置结构
m_algorithmParams = xmlParams;
2025-07-23 01:35:14 +08:00
LOG_INFO("projectType: %s\n", ProjectTypeToString(m_projectType).c_str());
LOG_INFO("Algorithm parameters initialized successfully:\n");
LOG_INFO(" LapWeld: lapHeight=%.1f, weldMinLen=%.1f, weldRefPoints=%d\n",
m_algorithmParams.lapWeldParam.lapHeight, m_algorithmParams.lapWeldParam.weldMinLen, m_algorithmParams.lapWeldParam.weldRefPoints);
2025-07-23 01:35:14 +08:00
// 循环打印所有相机的调平参数
LOG_INFO("Loading plane calibration parameters for all cameras:\n");
for (const auto& cameraParam : m_algorithmParams.planeCalibParam.cameraCalibParams) {
LOG_INFO("Camera %d (%s) calibration parameters:\n",
cameraParam.cameraIndex, cameraParam.cameraName.c_str());
LOG_INFO(" Is calibrated: %s\n", cameraParam.isCalibrated ? "YES" : "NO");
LOG_INFO(" Plane height: %.3f\n", cameraParam.planeHeight);
LOG_INFO(" Plane calibration matrix:\n");
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[0], cameraParam.planeCalib[1], cameraParam.planeCalib[2]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[3], cameraParam.planeCalib[4], cameraParam.planeCalib[5]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[6], cameraParam.planeCalib[7], cameraParam.planeCalib[8]);
LOG_INFO(" Inverse rotation matrix:\n");
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[0], cameraParam.invRMatrix[1], cameraParam.invRMatrix[2]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[3], cameraParam.invRMatrix[4], cameraParam.invRMatrix[5]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[6], cameraParam.invRMatrix[7], cameraParam.invRMatrix[8]);
LOG_INFO(" --------------------------------\n");
}
2025-07-23 01:35:14 +08:00
return SUCCESS;
}
2025-07-23 01:35:14 +08:00
// 手眼标定矩阵管理方法实现
2025-09-14 14:51:38 +08:00
const CalibMatrix LapWeldPresenter::GetClibMatrix(int index) const
2025-07-23 01:35:14 +08:00
{
CalibMatrix clibMatrix;
double initClibMatrix[16] = {
1.0, 0.0, 0.0, 0.0, // 第一行
0.0, 1.0, 0.0, 0.0, // 第二行
0.0, 0.0, 1.0, 0.0, // 第三行
0.0, 0.0, 0.0, 1.0 // 第四行
};
memcpy(clibMatrix.clibMatrix, initClibMatrix, sizeof(initClibMatrix));
if (index >= 0 && index < static_cast<int>(m_clibMatrixList.size())) {
clibMatrix = m_clibMatrixList[index];
memcpy(clibMatrix.clibMatrix, m_clibMatrixList[index].clibMatrix, sizeof(initClibMatrix));
} else {
LOG_WARNING("Invalid hand-eye calibration matrix\n");
2025-07-23 01:35:14 +08:00
}
return clibMatrix;
}
/// Registers the UI status sink that receives all presenter notifications.
void LapWeldPresenter::SetStatusCallback(IYLapWeldStatus* status)
{
    m_pStatus = status;
}
// TCP服务器回调函数实现
void LapWeldPresenter::OnTCPConnectionChanged(bool connected)
{
LOG_INFO("TCP connection status changed: %s\n", (connected ? "connected" : "disconnected"));
// 更新TCP连接状态
m_bTCPConnected = connected;
2025-06-08 12:48:04 +08:00
if (m_pStatus) {
2025-06-19 01:38:50 +08:00
// 发送详细的页面状态信息
if (connected) {
m_pStatus->OnStatusUpdate("TCP客户端连接成功通信正常");
2025-06-19 01:38:50 +08:00
} else {
m_pStatus->OnStatusUpdate("TCP客户端连接断开等待重新连接");
2025-06-19 01:38:50 +08:00
}
2025-06-08 12:48:04 +08:00
}
// 检查并更新工作状态
CheckAndUpdateWorkStatus();
2025-06-08 12:48:04 +08:00
}
bool LapWeldPresenter::OnTCPDetectionTrigger(bool startWork, int cameraIndex, qint64 timestamp)
2025-06-08 12:48:04 +08:00
{
LOG_INFO("Received TCP detection trigger: %s for camera index: %d, timestamp: %lld\n",
(startWork ? "start work" : "stop work"), cameraIndex, timestamp);
if (startWork) {
// 启动检测
int result = StartDetection(cameraIndex, false); // 手动触发模式
if (result == SUCCESS) {
LOG_INFO("Detection started successfully via TCP trigger\n");
return true;
} else {
LOG_ERROR("Failed to start detection via TCP trigger, error: %d\n", result);
return false;
}
} else {
// 停止检测
int result = StopDetection();
if (result == SUCCESS) {
LOG_INFO("Detection stopped successfully via TCP trigger\n");
return true;
} else {
LOG_ERROR("Failed to stop detection via TCP trigger, error: %d\n", result);
return false;
}
2025-06-08 12:48:04 +08:00
}
}
// 模拟检测函数,用于演示
2025-09-14 14:51:38 +08:00
int LapWeldPresenter::StartDetection(int cameraIdx, bool isAuto)
2025-06-08 12:48:04 +08:00
{
LOG_INFO("--------------------------------\n");
LOG_INFO("Start detection with camera index: %d\n", cameraIdx);
2025-06-08 12:48:04 +08:00
// 检查设备状态是否准备就绪
if (isAuto && m_currentWorkStatus != WorkStatus::Ready) {
LOG_INFO("Device not ready, cannot start detection\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("设备未准备就绪,无法开始检测");
}
return ERR_CODE(DEV_BUSY);
}
// 保存当前使用的相机ID从1开始编号
if(-1 != cameraIdx){
m_currentCameraIndex = cameraIdx;
}
int cameraIndex = m_currentCameraIndex;
m_currentWorkStatus = WorkStatus::Working;
2025-06-08 12:48:04 +08:00
// 通知UI工作状态变更为"正在工作"
if (m_pStatus) {
m_pStatus->OnWorkStatusChanged(WorkStatus::Working);
}
// 设置机械臂工作状态为忙碌
2025-06-08 12:48:04 +08:00
if(m_vrEyeDeviceList.empty()){
LOG_ERROR("No camera device found\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("未找到相机设备");
}
return ERR_CODE(DEV_NOT_FIND);
}
// 清空检测数据缓存(释放之前的内存)
_ClearDetectionDataCache();
int nRet = SUCCESS;
// 根据参数决定启动哪些相机
// 启动指定相机cameraIndex为相机ID从1开始编号
int arrayIndex = cameraIndex - 1; // 转换为数组索引从0开始
// 检查相机是否连接
if (arrayIndex < 0 || arrayIndex >= static_cast<int>(m_vrEyeDeviceList.size()) || m_vrEyeDeviceList[arrayIndex].second == nullptr) {
LOG_ERROR("Camera %d is not connected\n", cameraIndex);
QString cameraName = (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first) : QString("相机%1").arg(cameraIndex);
m_pStatus->OnStatusUpdate(QString("%1 未连接").arg(cameraName).toStdString());
return ERR_CODE(DEV_NOT_FIND);
}
if (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* pDevice = m_vrEyeDeviceList[arrayIndex].second;
2025-07-23 01:35:14 +08:00
EVzResultDataType eDataType = keResultDataType_Position;
if(m_projectType == ProjectType::DirectBag){
eDataType = keResultDataType_PointXYZRGBA;
}
2025-09-14 14:51:38 +08:00
pDevice->SetStatusCallback(&LapWeldPresenter::_StaticCameraNotify, this);
2025-07-23 01:35:14 +08:00
// 开始
2025-09-14 14:51:38 +08:00
nRet = pDevice->StartDetect(&LapWeldPresenter::_StaticDetectionCallback, eDataType, this);
2025-07-23 01:35:14 +08:00
LOG_INFO("Camera ID %d start detection nRet: %d\n", cameraIndex, nRet);
if (nRet == SUCCESS) {
QString cameraName = QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first);
m_pStatus->OnStatusUpdate(QString("启动%1检测成功").arg(cameraName).toStdString());
} else {
LOG_ERROR("Camera ID %d start detection failed with error: %d\n", cameraIndex, nRet);
QString cameraName = QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first);
m_pStatus->OnStatusUpdate(QString("启动%1检测失败[%d]").arg(cameraName).arg(nRet).toStdString());
}
} else {
LOG_ERROR("Invalid camera ID: %d, valid range is 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size());
m_pStatus->OnStatusUpdate(QString("无效的相机ID: %1有效范围: 1-%2").arg(cameraIndex).arg(m_vrEyeDeviceList.size()).toStdString());
nRet = ERR_CODE(DEV_NOT_FIND);
}
return nRet;
}
2025-09-14 14:51:38 +08:00
int LapWeldPresenter::StopDetection()
{
LOG_INFO("Stop detection\n");
// 停止所有相机的检测
for (size_t i = 0; i < m_vrEyeDeviceList.size(); ++i) {
IVrEyeDevice* pDevice = m_vrEyeDeviceList[i].second;
if (pDevice) {
int ret = pDevice->StopDetect();
if (ret == 0) {
LOG_INFO("Camera %zu stop detection successfully\n", i + 1);
} else {
LOG_WARNING("Camera %zu stop detection failed, error code: %d\n", i + 1, ret);
}
}
}
// 通知UI工作状态变更为"就绪"(如果设备连接正常)
if (m_pStatus) {
// 检查设备连接状态,决定停止后的状态
if (m_bCameraConnected) {
m_currentWorkStatus = WorkStatus::Ready;
m_pStatus->OnWorkStatusChanged(WorkStatus::Ready);
} else {
m_currentWorkStatus = WorkStatus::Error;
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
m_pStatus->OnStatusUpdate("检测已停止");
}
// 设置机械臂工作状态为空闲
return 0;
}
// 加载调试数据进行检测
2025-09-14 14:51:38 +08:00
int LapWeldPresenter::LoadDebugDataAndDetect(const std::string& filePath)
{
LOG_INFO("Loading debug data from file: %s\n", filePath.c_str());
m_currentWorkStatus = WorkStatus::Working;
if (m_pStatus) {
m_pStatus->OnWorkStatusChanged(WorkStatus::Working);
2025-07-23 01:35:14 +08:00
std::string fileName = QFileInfo(QString::fromStdString(filePath)).fileName().toStdString();
m_pStatus->OnStatusUpdate(QString("加载文件:%1").arg(fileName.c_str()).toStdString());
}
// 设置机械臂工作状态为忙碌
int lineNum = 0;
float scanSpeed = 0.0f;
int maxTimeStamp = 0;
int clockPerSecond = 0;
int result = SUCCESS;
2025-07-23 01:35:14 +08:00
// 1. 清空现有的检测数据缓存
_ClearDetectionDataCache();
2025-07-23 01:35:14 +08:00
{
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
// 使用统一的LoadLaserScanData接口自动判断文件格式
result = m_dataLoader.LoadLaserScanData(filePath, m_detectionDataCache, lineNum, scanSpeed, maxTimeStamp, clockPerSecond);
}
2025-07-23 01:35:14 +08:00
if (result != SUCCESS) {
LOG_ERROR("Failed to load debug data: %s\n", m_dataLoader.GetLastError().c_str());
if (m_pStatus) {
m_pStatus->OnStatusUpdate("调试数据加载失败: " + m_dataLoader.GetLastError());
}
return result;
}
LOG_INFO("Successfully loaded %d lines of debug data\n", lineNum);
if (m_pStatus) {
m_pStatus->OnStatusUpdate(QString("成功加载 %1 行调试数据").arg(lineNum).toStdString());
}
2025-07-23 01:35:14 +08:00
// 等待检测完成
result = _DetectTask();
return result;
}
// 为所有相机设置状态回调
2025-09-14 14:51:38 +08:00
void LapWeldPresenter::SetCameraStatusCallback(VzNL_OnNotifyStatusCBEx fNotify, void* param)
{
for (size_t i = 0; i < m_vrEyeDeviceList.size(); i++) {
IVrEyeDevice* pDevice = m_vrEyeDeviceList[i].second;
if (pDevice) {
pDevice->SetStatusCallback(fNotify, param);
LOG_DEBUG("Status callback set for camera %zu\n", i + 1);
}
}
}
// 打开相机
2025-09-14 14:51:38 +08:00
int LapWeldPresenter::_OpenDevice(int cameraIndex, const char* cameraName, const char* cameraIp, ProjectType& projectType)
{
IVrEyeDevice* pDevice = nullptr;
IVrEyeDevice::CreateObject(&pDevice);
int nRet = pDevice->InitDevice();
ERR_CODE_RETURN(nRet);
// 先设置状态回调
2025-09-14 14:51:38 +08:00
nRet = pDevice->SetStatusCallback(&LapWeldPresenter::_StaticCameraNotify, this);
LOG_DEBUG("SetStatusCallback result: %d\n", nRet);
ERR_CODE_RETURN(nRet);
// 尝试打开相机1
nRet = pDevice->OpenDevice(cameraIp, ProjectType::DirectBag == projectType);
// 通过回调更新相机1状态
bool cameraConnected = (SUCCESS == nRet);
if(!cameraConnected){
delete pDevice; // 释放失败的设备
pDevice = nullptr;
}
int arrIdx = cameraIndex - 1;
if(m_vrEyeDeviceList.size() > arrIdx){
m_vrEyeDeviceList[arrIdx] = std::make_pair(cameraName, pDevice); // 直接存储到索引0
}
m_pStatus->OnCamera1StatusChanged(cameraConnected);
m_pStatus->OnStatusUpdate(cameraConnected ? "相机连接成功" : "相机连接失败");
m_bCameraConnected = cameraConnected;
return nRet;
}
// 判断是否可以开始检测
2025-09-14 14:51:38 +08:00
bool LapWeldPresenter::_SinglePreDetection(int cameraIndex)
{
if(m_vrEyeDeviceList.empty()){
LOG_ERROR("No camera device found\n");
if (nullptr != m_pStatus) {
m_pStatus->OnStatusUpdate("未找到相机设备");
}
return false;
}
if(cameraIndex < 1 || cameraIndex > static_cast<int>(m_vrEyeDeviceList.size())){
LOG_ERROR("Invalid camera index: %d, valid range: 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size());
return false;
}
if(m_vrEyeDeviceList[cameraIndex - 1].second == nullptr){
LOG_ERROR("Camera %d is not connected\n", cameraIndex);
return false;
}
return true;
}
2025-09-14 14:51:38 +08:00
int LapWeldPresenter::_SingleDetection(int cameraIndex, bool isStart)
{
int nRet = SUCCESS;
if (isStart) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
QString message = QString("收到信号,启动%1检测").arg(cameraName);
if (nullptr != m_pStatus) {
m_pStatus->OnStatusUpdate(message.toStdString());
}
nRet = StartDetection(cameraIndex);
} else {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
QString message = QString("收到信号,停止%1检测").arg(cameraName);
if (nullptr != m_pStatus) {
m_pStatus->OnStatusUpdate(message.toStdString());
}
nRet = StopDetection();
}
return nRet;
}
// 静态回调函数实现
2025-09-14 14:51:38 +08:00
void LapWeldPresenter::_StaticCameraNotify(EVzDeviceWorkStatus eStatus, void* pExtData, unsigned int nDataLength, void* pInfoParam)
{
2025-09-14 14:51:38 +08:00
// 从pInfoParam获取this指针转换回LapWeldPresenter*类型
LapWeldPresenter* pThis = reinterpret_cast<LapWeldPresenter*>(pInfoParam);
if (pThis)
{
// 调用实例的非静态成员函数
pThis->_CameraNotify(eStatus, pExtData, nDataLength, pInfoParam);
}
}
2025-09-14 14:51:38 +08:00
/// Instance handler for camera SDK status notifications: tracks the link
/// state, mirrors it to the UI and wakes the algorithm thread on scan finish.
void LapWeldPresenter::_CameraNotify(EVzDeviceWorkStatus eStatus, void *pExtData, unsigned int nDataLength, void *pInfoParam)
{
    LOG_DEBUG("[Camera Notify] received: status=%d\n", (int)eStatus);

    if (eStatus == EVzDeviceWorkStatus::keDeviceWorkStatus_Offline) {
        LOG_WARNING("[Camera Notify] Camera device offline/disconnected\n");
        m_bCameraConnected = false;
        if (m_pStatus) {
            // NOTE(review): only camera 1's indicator is updated here — this
            // callback does not yet identify which camera went offline
            // (would need pInfoParam or similar to distinguish).
            m_pStatus->OnCamera1StatusChanged(false);
            m_pStatus->OnStatusUpdate("相机设备离线");
        }
        CheckAndUpdateWorkStatus();
    } else if (eStatus == EVzDeviceWorkStatus::keDeviceWorkStatus_Eye_Reconnect) {
        LOG_INFO("[Camera Notify] Camera device online/connected\n");
        m_bCameraConnected = true;
        if (m_pStatus) {
            m_pStatus->OnCamera1StatusChanged(true);
            m_pStatus->OnStatusUpdate("相机设备已连接");
        }
        CheckAndUpdateWorkStatus();
    } else if (eStatus == EVzDeviceWorkStatus::keDeviceWorkStatus_Device_Swing_Finish) {
        LOG_INFO("[Camera Notify] Received scan finish signal from camera\n");
        if (m_pStatus) {
            m_pStatus->OnStatusUpdate("相机扫描完成,开始数据处理...");
        }
        // Wake the algorithm thread to process the buffered scan lines.
        m_algoDetectCondition.notify_one();
    }
    // All other statuses are intentionally ignored.
}
// 检测数据回调函数静态版本
2025-09-14 14:51:38 +08:00
void LapWeldPresenter::_StaticDetectionCallback(EVzResultDataType eDataType, SVzLaserLineData* pLaserLinePoint, void* pUserData)
{
2025-09-14 14:51:38 +08:00
LapWeldPresenter* pThis = reinterpret_cast<LapWeldPresenter*>(pUserData);
if (pThis) {
pThis->_DetectionCallback(eDataType, pLaserLinePoint, pUserData);
}
}
// 检测数据回调函数实例版本
2025-09-14 14:51:38 +08:00
void LapWeldPresenter::_DetectionCallback(EVzResultDataType eDataType, SVzLaserLineData* pLaserLinePoint, void* pUserData)
{
if (!pLaserLinePoint) {
LOG_WARNING("[Detection Callback] pLaserLinePoint is null\n");
return;
}
if (pLaserLinePoint->nPointCount <= 0) {
LOG_WARNING("[Detection Callback] Point count is zero or negative: %d\n", pLaserLinePoint->nPointCount);
return;
}
if (!pLaserLinePoint->p3DPoint) {
LOG_WARNING("[Detection Callback] p3DPoint is null\n");
return;
}
// 直接存储SVzLaserLineData到统一缓存中
SVzLaserLineData lineData;
memset(&lineData, 0, sizeof(SVzLaserLineData));
// 根据数据类型分配和复制点云数据
if (eDataType == keResultDataType_Position) {
// 复制SVzNL3DPosition数据
if (pLaserLinePoint->p3DPoint && pLaserLinePoint->nPointCount > 0) {
lineData.p3DPoint = new SVzNL3DPosition[pLaserLinePoint->nPointCount];
if (lineData.p3DPoint) {
memcpy(lineData.p3DPoint, pLaserLinePoint->p3DPoint, sizeof(SVzNL3DPosition) * pLaserLinePoint->nPointCount);
}
lineData.p2DPoint = new SVzNL2DPosition[pLaserLinePoint->nPointCount];
if (lineData.p2DPoint) {
memcpy(lineData.p2DPoint, pLaserLinePoint->p2DPoint, sizeof(SVzNL2DPosition) * pLaserLinePoint->nPointCount);
}
}
} else if (eDataType == keResultDataType_PointXYZRGBA) {
// 复制SVzNLPointXYZRGBA数据
if (pLaserLinePoint->p3DPoint && pLaserLinePoint->nPointCount > 0) {
lineData.p3DPoint = new SVzNLPointXYZRGBA[pLaserLinePoint->nPointCount];
if (lineData.p3DPoint) {
memcpy(lineData.p3DPoint, pLaserLinePoint->p3DPoint, sizeof(SVzNLPointXYZRGBA) * pLaserLinePoint->nPointCount);
}
lineData.p2DPoint = new SVzNL2DLRPoint[pLaserLinePoint->nPointCount];
if (lineData.p2DPoint) {
memcpy(lineData.p2DPoint, pLaserLinePoint->p2DPoint, sizeof(SVzNL2DLRPoint) * pLaserLinePoint->nPointCount);
}
}
2025-07-23 01:35:14 +08:00
}
lineData.nPointCount = pLaserLinePoint->nPointCount;
lineData.llTimeStamp = pLaserLinePoint->llTimeStamp;
lineData.llFrameIdx = pLaserLinePoint->llFrameIdx;
lineData.nEncodeNo = pLaserLinePoint->nEncodeNo;
lineData.fSwingAngle = pLaserLinePoint->fSwingAngle;
lineData.bEndOnceScan = pLaserLinePoint->bEndOnceScan;
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
m_detectionDataCache.push_back(std::make_pair(eDataType, lineData));
}
2025-09-14 14:51:38 +08:00
void LapWeldPresenter::CheckAndUpdateWorkStatus()
{
if (m_bCameraConnected) {
m_currentWorkStatus = WorkStatus::Ready;
m_pStatus->OnWorkStatusChanged(WorkStatus::Ready);
} else {
m_currentWorkStatus = WorkStatus::Error;
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
}
2025-09-14 14:51:38 +08:00
void LapWeldPresenter::_AlgoDetectThread()
{
while(m_bAlgoDetectThreadRunning)
{
std::unique_lock<std::mutex> lock(m_algoDetectMutex);
m_algoDetectCondition.wait(lock, [this]() {
return m_currentWorkStatus == WorkStatus::Working;
});
if(!m_bAlgoDetectThreadRunning){
break;
}
// 检查设备状态是否准备就绪
int nRet = _DetectTask();
LOG_ERROR("DetectTask result: %d\n", nRet);
if(nRet != SUCCESS){
m_pStatus->OnWorkStatusChanged(WorkStatus::Error);
}
LOG_DEBUG("Algo Thread end\n");
m_currentWorkStatus = WorkStatus::Ready;
}
}
2025-09-14 14:51:38 +08:00
int LapWeldPresenter::_DetectTask()
{
LOG_INFO("[Algo Thread] Start real detection task using algorithm\n");
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
// 1. 获取缓存的点云数据
if (m_detectionDataCache.empty()) {
LOG_WARNING("No cached detection data available\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("无缓存的检测数据");
}
return ERR_CODE(DEV_DATA_INVALID);
}
// 2. 准备算法输入数据
2025-07-23 01:35:14 +08:00
unsigned int lineNum = 0;
lineNum = m_detectionDataCache.size();
if(m_pStatus){
m_pStatus->OnStatusUpdate("扫描线数:" + std::to_string(lineNum) + ",正在算法检测...");
}
CVrTimeUtils oTimeUtils;
2025-07-23 01:35:14 +08:00
// 获取当前使用的手眼标定矩阵
const CalibMatrix currentClibMatrix = GetClibMatrix(m_currentCameraIndex - 1);
DetectionResult detectionResult;
int nRet = m_pDetectPresenter->DetectLapWeld(m_currentCameraIndex, m_detectionDataCache,
m_algorithmParams, m_debugParam, m_dataLoader,
currentClibMatrix.clibMatrix, detectionResult);
// 根据项目类型选择处理方式
if (m_pStatus) {
QString err = QString("错误:%1").arg(nRet);
m_pStatus->OnStatusUpdate(QString("检测%1").arg(SUCCESS == nRet ? "成功": err).toStdString());
}
ERR_CODE_RETURN(nRet);
2025-09-14 14:51:38 +08:00
LOG_INFO("[Algo Thread] sx_getLapWeldPostion detected %zu objects time : %.2f ms\n", detectionResult.positions.size(), oTimeUtils.GetElapsedTimeInMilliSec());
2025-07-23 01:35:14 +08:00
// 8. 返回检测结果
2025-07-23 01:35:14 +08:00
detectionResult.cameraIndex = m_currentCameraIndex;
// 调用检测结果回调函数
m_pStatus->OnDetectionResult(detectionResult);
// 更新状态
2025-07-23 01:35:14 +08:00
QString statusMsg = QString("检测完成,发现%1个目标").arg(detectionResult.positions.size());
m_pStatus->OnStatusUpdate(statusMsg.toStdString());
// 发送检测结果给TCP客户端
_SendDetectionResultToTCP(detectionResult, m_currentCameraIndex);
// 9. 检测完成后,将工作状态更新为"完成"
2025-06-08 12:48:04 +08:00
if (m_pStatus) {
m_currentWorkStatus = WorkStatus::Completed;
2025-06-08 12:48:04 +08:00
m_pStatus->OnWorkStatusChanged(WorkStatus::Completed);
}
// 恢复到就绪状态
m_currentWorkStatus = WorkStatus::Ready;
return SUCCESS;
}
// 释放缓存的检测数据
2025-09-14 14:51:38 +08:00
void LapWeldPresenter::_ClearDetectionDataCache()
{
std::lock_guard<std::mutex> lock(m_detectionDataMutex);
// 释放加载的数据
m_dataLoader.FreeLaserScanData(m_detectionDataCache);
LOG_DEBUG("Detection data cache cleared successfully\n");
}
// 发送检测结果给TCP客户端
void LapWeldPresenter::_SendDetectionResultToTCP(const DetectionResult& detectionResult, int cameraIndex)
{
LOG_INFO("Sending detection result for camera %d\n", cameraIndex);
// 检查是否有检测结果
if (detectionResult.positions.empty()) {
LOG_INFO("No objects detected\n");
if (m_pStatus) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
m_pStatus->OnStatusUpdate(QString("检测完成,%1未发现目标").arg(cameraName).toStdString());
}
} else {
LOG_INFO("Detected %zu objects for camera %d\n", detectionResult.positions.size(), cameraIndex);
if (m_pStatus) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
m_pStatus->OnStatusUpdate(QString("检测完成,%1发现%2个目标").arg(cameraName).arg(detectionResult.positions.size()).toStdString());
}
}
}
2025-06-08 12:48:04 +08:00
// 实现配置改变通知接口
2025-09-14 14:51:38 +08:00
void LapWeldPresenter::OnConfigChanged(const ConfigResult& configResult)
{
LOG_INFO("Configuration changed notification received, reloading algorithm parameters\n");
// 更新调试参数
m_debugParam = configResult.debugParam;
// 重新初始化算法参数
int result = InitAlgorithmParams();
if (result == SUCCESS) {
LOG_INFO("Algorithm parameters reloaded successfully after config change\n");
if (m_pStatus) {
m_pStatus->OnStatusUpdate("配置已更新,算法参数重新加载成功");
}
} else {
LOG_ERROR("Failed to reload algorithm parameters after config change, error: %d\n", result);
if (m_pStatus) {
m_pStatus->OnStatusUpdate("配置更新后算法参数重新加载失败");
}
}
}
// 根据相机索引获取调平参数
2025-09-14 14:51:38 +08:00
SSG_planeCalibPara LapWeldPresenter::_GetCameraCalibParam(int cameraIndex)
{
// 查找指定相机索引的调平参数
2025-07-23 01:35:14 +08:00
SSG_planeCalibPara calibParam;
// 使用单位矩阵(未校准状态)
double identityMatrix[9] = {1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0};
for (int i = 0; i < 9; i++) {
calibParam.planeCalib[i] = identityMatrix[i];
calibParam.invRMatrix[i] = identityMatrix[i];
}
calibParam.planeHeight = -1.0; // 使用默认高度
for (const auto& cameraParam : m_algorithmParams.planeCalibParam.cameraCalibParams) {
if (cameraParam.cameraIndex == cameraIndex) {
// 根据isCalibrated标志决定使用标定矩阵还是单位矩阵
if (cameraParam.isCalibrated) {
// 使用实际的标定矩阵
for (int i = 0; i < 9; i++) {
calibParam.planeCalib[i] = cameraParam.planeCalib[i];
calibParam.invRMatrix[i] = cameraParam.invRMatrix[i];
}
2025-07-23 01:35:14 +08:00
calibParam.planeHeight = cameraParam.planeHeight;
}
}
}
2025-07-23 01:35:14 +08:00
return calibParam;
}
// ============ 实现 IConfigCommandHandler 接口 ============
bool LapWeldPresenter::OnCameraExposeCommand(const CameraConfigParam& param)
{
LOG_INFO("Applying camera expose setting: camera %d, expose time: %.2f\n",
param.cameraIndex, param.exposeTime);
if (param.cameraIndex == -1) {
// 应用到所有相机
for (size_t i = 0; i < m_vrEyeDeviceList.size(); i++) {
IVrEyeDevice* device = m_vrEyeDeviceList[i].second;
if (device && param.exposeTime > 0) {
unsigned int exposeTime = static_cast<unsigned int>(param.exposeTime);
device->SetEyeExpose(exposeTime);
}
}
} else {
// 应用到指定相机
int arrayIndex = param.cameraIndex - 1;
if (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* device = m_vrEyeDeviceList[arrayIndex].second;
if (device && param.exposeTime > 0) {
unsigned int exposeTime = static_cast<unsigned int>(param.exposeTime);
device->SetEyeExpose(exposeTime);
}
}
}
return true;
}
bool LapWeldPresenter::OnCameraGainCommand(const CameraConfigParam& param)
{
LOG_INFO("Applying camera gain setting: camera %d, gain: %.2f\n",
param.cameraIndex, param.gain);
if (param.cameraIndex == -1) {
// 应用到所有相机
for (size_t i = 0; i < m_vrEyeDeviceList.size(); i++) {
IVrEyeDevice* device = m_vrEyeDeviceList[i].second;
if (device && param.gain > 0) {
unsigned int gain = static_cast<unsigned int>(param.gain);
device->SetEyeGain(gain);
}
}
} else {
// 应用到指定相机
int arrayIndex = param.cameraIndex - 1;
if (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* device = m_vrEyeDeviceList[arrayIndex].second;
if (device && param.gain > 0) {
unsigned int gain = static_cast<unsigned int>(param.gain);
device->SetEyeGain(gain);
}
}
}
return true;
}
bool LapWeldPresenter::OnCameraFrameRateCommand(const CameraConfigParam& param)
{
LOG_INFO("Applying camera frame rate setting: camera %d, frame rate: %.2f\n",
param.cameraIndex, param.frameRate);
if (param.cameraIndex == -1) {
// 应用到所有相机
for (size_t i = 0; i < m_vrEyeDeviceList.size(); i++) {
IVrEyeDevice* device = m_vrEyeDeviceList[i].second;
if (device && param.frameRate > 0) {
int frameRate = static_cast<int>(param.frameRate);
device->SetFrame(frameRate);
}
}
} else {
// 应用到指定相机
int arrayIndex = param.cameraIndex - 1;
if (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* device = m_vrEyeDeviceList[arrayIndex].second;
if (device && param.frameRate > 0) {
int frameRate = static_cast<int>(param.frameRate);
device->SetFrame(frameRate);
}
}
}
return true;
}
bool LapWeldPresenter::OnCameraSwingCommand(const SwingConfigParam& param)
{
LOG_INFO("Applying camera swing setting: camera %d, speed: %.2f, angles: %.2f-%.2f\n",
param.cameraIndex, param.swingSpeed, param.startAngle, param.stopAngle);
if (param.cameraIndex == -1) {
// 应用到所有相机
for (size_t i = 0; i < m_vrEyeDeviceList.size(); i++) {
IVrEyeDevice* device = m_vrEyeDeviceList[i].second;
if (device) {
if (param.swingSpeed > 0) {
float swingSpeed = static_cast<float>(param.swingSpeed);
device->SetSwingSpeed(swingSpeed);
}
if (param.startAngle != param.stopAngle) {
float startAngle = static_cast<float>(param.startAngle);
float stopAngle = static_cast<float>(param.stopAngle);
device->SetSwingAngle(startAngle, stopAngle);
}
}
}
} else {
// 应用到指定相机
int arrayIndex = param.cameraIndex - 1;
if (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* device = m_vrEyeDeviceList[arrayIndex].second;
if (device) {
if (param.swingSpeed > 0) {
float swingSpeed = static_cast<float>(param.swingSpeed);
device->SetSwingSpeed(swingSpeed);
}
if (param.startAngle != param.stopAngle) {
float startAngle = static_cast<float>(param.startAngle);
float stopAngle = static_cast<float>(param.stopAngle);
device->SetSwingAngle(startAngle, stopAngle);
}
}
}
}
return true;
}
// Handle an algorithm-parameter command. Application of the parameter to the
// algorithm library is not wired up yet; currently this only logs the request
// and surfaces a status notification. Always reports success.
bool LapWeldPresenter::OnAlgoParamCommand(const AlgoConfigParam& param)
{
    LOG_INFO("Applying algorithm parameter: %s = %.3f\n", param.paramName, param.paramValue);

    if (m_pStatus != nullptr) {
        m_pStatus->OnStatusUpdate("算法参数已更新");
    }
    return true;
}
// Handle a calibration-parameter command. Applying the data depends on the
// final calibration structures; for now the command is only logged and
// acknowledged. Always reports success.
bool LapWeldPresenter::OnCalibParamCommand(const CalibConfigParam& param)
{
    LOG_INFO("Applying calibration parameter for camera %d\n", param.cameraIndex);
    return true;
}
bool LapWeldPresenter::OnFullConfigCommand(const FullConfigParam& param)
{
LOG_INFO("Applying full configuration update\n");
// 重新加载完整配置
QString configPath = PathManager::GetInstance().GetConfigFilePath();
if (m_vrConfig) {
m_configResult = m_vrConfig->LoadConfig(configPath.toStdString());
// 重新初始化算法参数
InitAlgorithmParams();
if (m_pStatus) {
m_pStatus->OnStatusUpdate("配置已重新加载");
}
return true;
}
return false;
}
// 设置默认相机索引
2025-09-14 14:51:38 +08:00
void LapWeldPresenter::SetDefaultCameraIndex(int cameraIndex)
{
LOG_INFO("Setting default camera index from %d to %d\n", m_currentCameraIndex, cameraIndex);
// 验证相机索引的有效性cameraIndex是配置中的索引从1开始
if (cameraIndex < 1 || cameraIndex > static_cast<int>(m_vrEyeDeviceList.size())) {
LOG_WARNING("Invalid camera index: %d, valid range: 1-%zu\n", cameraIndex, m_vrEyeDeviceList.size());
if (m_pStatus) {
m_pStatus->OnStatusUpdate(QString("无效的相机索引: %1有效范围: 1-%2").arg(cameraIndex).arg(m_vrEyeDeviceList.size()).toStdString());
}
return;
}
// 更新默认相机索引
m_currentCameraIndex = cameraIndex;
LOG_INFO("Default camera index updated to %d\n", m_currentCameraIndex);
if (m_pStatus) {
QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
m_pStatus->OnStatusUpdate(QString("设置%1为默认相机").arg(cameraName).toStdString());
}
}
// 保存检测数据到文件(默认实现)
2025-09-14 14:51:38 +08:00
int LapWeldPresenter::SaveDetectionDataToFile(const std::string& filePath)
{
LOG_INFO("Saving detection data to file: %s\n", filePath.c_str());
if (m_detectionDataCache.empty()) {
LOG_WARNING("No detection data available for saving\n");
return ERR_CODE(DEV_DATA_INVALID);
}
// 保存数据到文件
int lineNum = static_cast<int>(m_detectionDataCache.size());
float scanSpeed = 0.0f;
int maxTimeStamp = 0;
int clockPerSecond = 0;
int result = m_dataLoader.SaveLaserScanData(filePath, m_detectionDataCache, lineNum, scanSpeed, maxTimeStamp, clockPerSecond);
if (result == SUCCESS) {
LOG_INFO("Successfully saved %d lines of detection data to file: %s\n", lineNum, filePath.c_str());
} else {
LOG_ERROR("Failed to save detection data, error: %s\n", m_dataLoader.GetLastError().c_str());
}
return result;
}
// ============ ICameraLevelCalculator interface implementation ============

// Compute the plane-leveling calibration (rotation matrix, plane height and
// inverse rotation) from a set of scanned laser lines. Returns false when the
// input is empty, the XYZ conversion fails, or the algorithm throws.
bool LapWeldPresenter::CalculatePlaneCalibration(
    const std::vector<std::pair<EVzResultDataType, SVzLaserLineData>>& scanData,
    double planeCalib[9],
    double& planeHeight,
    double invRMatrix[9])
{
    try {
        if (scanData.empty()) {
            LOG_ERROR("No scan data available for plane calibration\n");
            return false;
        }
        LOG_INFO("Calculating plane calibration from %zu scan lines\n", scanData.size());

        // The leveling algorithm consumes XYZ point lines, so convert first.
        LaserDataLoader dataLoader;
        std::vector<std::vector<SVzNL3DPosition>> xyzData;
        if (dataLoader.ConvertToSVzNL3DPosition(scanData, xyzData) != SUCCESS || xyzData.empty()) {
            LOG_WARNING("Failed to convert data to XYZ format or no XYZ data available\n");
            return false;
        }

        // Run the weld-project leveling algorithm and unpack its result into
        // the caller-provided output parameters.
        SSG_planeCalibPara calibResult = sx_getBaseCalibPara(xyzData);
        for (int i = 0; i < 9; i++) {
            planeCalib[i] = calibResult.planeCalib[i];
            invRMatrix[i] = calibResult.invRMatrix[i];
        }
        planeHeight = calibResult.planeHeight;

        LOG_INFO("Plane calibration calculated successfully: height=%.3f\n", planeHeight);
        return true;
    } catch (const std::exception& e) {
        LOG_ERROR("Exception in CalculatePlaneCalibration: %s\n", e.what());
        return false;
    } catch (...) {
        LOG_ERROR("Unknown exception in CalculatePlaneCalibration\n");
        return false;
    }
}
// ============ ICameraLevelResultSaver interface implementation ============

// Persist the plane-leveling result for one camera into the configuration
// file: load the current config, replace this camera's calibration entry,
// and write the config back. Returns false on invalid arguments, a missing
// config object, a failed save, or an exception.
bool LapWeldPresenter::SaveLevelingResults(double planeCalib[9], double planeHeight, double invRMatrix[9],
                                           int cameraIndex, const QString& cameraName)
{
    try {
        // Sanity checks on presenter state and the supplied camera identity.
        if (!m_vrConfig) {
            LOG_ERROR("Config is null, cannot save leveling results\n");
            return false;
        }
        if (cameraIndex <= 0) {
            LOG_ERROR("Invalid camera index: %d\n", cameraIndex);
            return false;
        }
        if (cameraName.isEmpty()) {
            LOG_ERROR("Camera name is empty\n");
            return false;
        }

        // Load the current on-disk configuration so that only this camera's
        // calibration entry is modified on save.
        QString configPath = PathManager::GetInstance().GetConfigFilePath();
        LOG_INFO("Config path: %s\n", configPath.toUtf8().constData());
        ConfigResult configResult = m_vrConfig->LoadConfig(configPath.toStdString());

        // Build the calibration record for this camera and mark it calibrated.
        VrCameraPlaneCalibParam cameraParam;
        cameraParam.cameraIndex = cameraIndex;
        cameraParam.cameraName = cameraName.toStdString();
        cameraParam.planeHeight = planeHeight;
        cameraParam.isCalibrated = true;
        for (int i = 0; i < 9; i++) {
            cameraParam.planeCalib[i] = planeCalib[i];
            cameraParam.invRMatrix[i] = invRMatrix[i];
        }

        configResult.algorithmParams.planeCalibParam.SetCameraCalibParam(cameraParam);

        // Write the updated configuration back to disk.
        if (!m_vrConfig->SaveConfig(configPath.toStdString(), configResult)) {
            LOG_ERROR("Failed to save config with leveling results\n");
            return false;
        }

        LOG_INFO("Leveling results saved successfully for camera %d (%s)\n", cameraIndex, cameraName.toUtf8().constData());
        LOG_INFO("Plane height: %.3f\n", planeHeight);
        LOG_INFO("Calibration marked as completed\n");
        return true;
    } catch (const std::exception& e) {
        LOG_ERROR("Exception in SaveLevelingResults: %s\n", e.what());
        return false;
    }
}
// Load a previously saved plane-leveling result for one camera from the
// configuration file into the caller-provided output parameters. Returns
// false when the config object is missing, no calibrated entry exists for
// the camera, or an exception occurs.
bool LapWeldPresenter::LoadLevelingResults(int cameraIndex, const QString& cameraName,
                                           double planeCalib[9], double& planeHeight, double invRMatrix[9])
{
    try {
        if (!m_vrConfig) {
            LOG_ERROR("Config is null, cannot load calibration data\n");
            return false;
        }

        // Read the configuration file and look up this camera's entry.
        QString configPath = PathManager::GetInstance().GetConfigFilePath();
        ConfigResult configResult = m_vrConfig->LoadConfig(configPath.toStdString());

        // An entry that exists but was never calibrated counts as missing.
        VrCameraPlaneCalibParam cameraParamValue;
        const bool found = configResult.algorithmParams.planeCalibParam.GetCameraCalibParam(cameraIndex, cameraParamValue);
        if (!found || !cameraParamValue.isCalibrated) {
            LOG_INFO("No calibration data found for camera %d (%s)\n", cameraIndex, cameraName.toUtf8().constData());
            return false;
        }

        // Copy the stored calibration into the output parameters.
        for (int i = 0; i < 9; i++) {
            planeCalib[i] = cameraParamValue.planeCalib[i];
            invRMatrix[i] = cameraParamValue.invRMatrix[i];
        }
        planeHeight = cameraParamValue.planeHeight;

        LOG_INFO("Calibration data loaded successfully for camera %d (%s)\n", cameraIndex, cameraName.toUtf8().constData());
        LOG_INFO("Plane height: %.3f\n", planeHeight);
        return true;
    } catch (const std::exception& e) {
        LOG_ERROR("Exception in LoadLevelingResults: %s\n", e.what());
        return false;
    }
}