GrabBag/App/Workpiece/WorkpieceApp/Presenter/Src/WorkpiecePresenter.cpp

#include "WorkpiecePresenter.h"
#include "VrError.h"
#include "VrLog.h"
#include <QtCore/QCoreApplication>
#include <QtCore/QFileInfo>
#include <QtCore/QDir>
#include <QtCore/QString>
#include <QtCore/QStandardPaths>
#include <QtCore/QFile>
#include <QtCore/QDateTime>
#include <cmath>
#include <algorithm>
#include <QImage>
#include <QThread>
#include <atomic>
#include <QJsonObject>
#include <QJsonArray>
#include "Version.h"
#include "VrTimeUtils.h"
#include "VrDateUtils.h"
#include "SG_baseDataType.h"
#include "VrConvert.h"
#include "TCPServerProtocol.h"
#include "DetectPresenter.h"
#include "PathManager.h"
#include "BQ_workpieceCornerExtraction_Export.h" // 调平算法接口
WorkpiecePresenter::WorkpiecePresenter(QObject *parent)
: BasePresenter(parent)
, m_vrConfig(nullptr)
, m_pDetectPresenter(nullptr)
, m_pTCPServer(nullptr)
, m_bTCPConnected(false)
{
// The base class has already created the camera reconnect timer and the detection data cache
// Set the config command handler
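// Note: this object implements the IConfigCommandHandler interface; the corresponding callbacks
// (OnCameraExposeCommand, OnCameraGainCommand, OnCameraSwingCommand, ...) are defined near the end of this file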
m_configMonitor.SetCommandHandler(this);
}
WorkpiecePresenter::~WorkpiecePresenter()
{
// The base class automatically cleans up: camera reconnect timer, algorithm detection thread, detection data cache, and camera device resources
// Stop the config monitor
m_configMonitor.Stop();
// Release the config instance
if (m_vrConfig) {
delete m_vrConfig;
m_vrConfig = nullptr;
}
// Release the TCP server
if (m_pTCPServer) {
m_pTCPServer->Deinitialize();
delete m_pTCPServer;
m_pTCPServer = nullptr;
}
// Release the detection presenter
if(m_pDetectPresenter)
{
delete m_pDetectPresenter;
m_pDetectPresenter = nullptr;
}
}
int WorkpiecePresenter::InitApp()
{
LOG_DEBUG("Start APP Version: %s\n", WORKPIECE_FULL_VERSION_STRING);
// Set the initial work status
SetWorkStatus(WorkStatus::InitIng);
m_pDetectPresenter = new DetectPresenter();
// Get the configuration file path
QString configPath = PathManager::GetInstance().GetConfigFilePath();
int nRet = SUCCESS;
// Create the IVrConfig instance
if (!IVrConfig::CreateInstance(&m_vrConfig) || !m_vrConfig) {
LOG_ERROR("Failed to create VrConfig instance\n");
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) pStatus->OnStatusUpdate("配置实例创建失败");
return ERR_CODE(DEV_CONFIG_ERR);
}
// Load the configuration file
ConfigResult configResult;
int ret = m_vrConfig->LoadConfig(configPath.toStdString(), configResult);
if (ret != LOAD_CONFIG_SUCCESS) {
LOG_ERROR("Failed to load config file, error code: %d\n", ret);
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) pStatus->OnStatusUpdate("配置文件加载失败");
return ERR_CODE(DEV_CONFIG_ERR);
}
LOG_INFO("Configuration loaded successfully\n");
// Save the configuration result
m_configResult = configResult;
// Set the config-change notification callback
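// This object will then receive OnConfigChanged callbacks (implemented below) whenever the configuration is modified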
m_vrConfig->SetConfigChangeNotify(this);
// Start the shared-memory config monitor (optional, as needed)
if (!m_configMonitor.Start()) {
LOG_WARNING("Failed to start config monitor, runtime config updates will not work\n");
// Do not return an error; allow the application to keep running
} else {
LOG_INFO("Config monitor started successfully\n");
}
// Call the base-class InitCamera to initialize cameras (bRGB=false, bSwing=true)
InitCamera(configResult.cameraList, false, true);
LOG_INFO("Camera initialization completed. Connected cameras: %zu, default camera index: %d\n",
m_vrEyeDeviceList.size(), m_currentCameraIndex);
// Initialize the TCP server
nRet = InitTCPServer();
if (nRet != 0) {
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) pStatus->OnStatusUpdate("TCP服务器初始化失败");
m_bTCPConnected = false;
} else {
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) pStatus->OnStatusUpdate("TCP服务器初始化成功");
}
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) pStatus->OnStatusUpdate("设备初始化完成");
CheckAndUpdateWorkStatus();
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) pStatus->OnStatusUpdate("配置初始化成功");
return SUCCESS;
}
// Initialize algorithm parameters (implements a BasePresenter pure virtual function)
int WorkpiecePresenter::InitAlgoParams()
{
LOG_DEBUG("initializing algorithm parameters\n");
QString exePath = QCoreApplication::applicationFilePath();
// Clear the existing hand-eye calibration matrix list
m_clibMatrixList.clear();
// Get the hand-eye calibration file path and make sure the file exists
QString clibPath = PathManager::GetInstance().GetCalibrationFilePath();
LOG_INFO("Loading hand-eye matrices from: %s\n", clibPath.toStdString().c_str());
// Read the number of stored matrices
int nExistMatrixNum = CVrConvert::GetClibMatrixCount(clibPath.toStdString().c_str());
LOG_INFO("Found %d hand-eye calibration matrices\n", nExistMatrixNum);
// Load each matrix in turn
for(int matrixIndex = 0; matrixIndex < nExistMatrixNum; matrixIndex++)
{
// Build the matrix identifier
char matrixIdent[64];
#ifdef _WIN32
sprintf_s(matrixIdent, "CalibMatrixInfo_%d", matrixIndex);
#else
sprintf(matrixIdent, "CalibMatrixInfo_%d", matrixIndex);
#endif
// Create a new calibration matrix structure
CalibMatrix calibMatrix;
// Initialize to the identity matrix
double initClibMatrix[16] = {
1.0, 0.0, 0.0, 0.0, // row 1
0.0, 1.0, 0.0, 0.0, // row 2
0.0, 0.0, 1.0, 0.0, // row 3
0.0, 0.0, 0.0, 1.0 // row 4
};
// Load the matrix data
bool loadSuccess = CVrConvert::LoadClibMatrix(clibPath.toStdString().c_str(), matrixIdent, "dCalibMatrix", calibMatrix.clibMatrix);
if(loadSuccess)
{
m_clibMatrixList.push_back(calibMatrix);
LOG_INFO("Successfully loaded matrix %d\n", matrixIndex);
// Print the matrix contents
QString clibMatrixStr;
LOG_INFO("Matrix %d content:\n", matrixIndex);
for (int i = 0; i < 4; ++i) {
clibMatrixStr.clear();
for (int j = 0; j < 4; ++j) {
clibMatrixStr += QString::asprintf("%8.4f ", calibMatrix.clibMatrix[i * 4 + j]);
}
LOG_INFO(" %s\n", clibMatrixStr.toStdString().c_str());
}
}
else
{
LOG_WARNING("Failed to load matrix %d, using identity matrix\n", matrixIndex);
// If loading failed, fall back to the identity matrix
memcpy(calibMatrix.clibMatrix, initClibMatrix, sizeof(initClibMatrix));
m_clibMatrixList.push_back(calibMatrix);
}
}
LOG_INFO("Total loaded %zu hand-eye calibration matrices\n", m_clibMatrixList.size());
// Use the configuration result already loaded in InitApp
const VrAlgorithmParams& xmlParams = m_configResult.algorithmParams;
// Save the debug parameters
m_debugParam = m_configResult.debugParam;
LOG_INFO("Loaded XML params - Workpiece: lineLen=%.1f\n", xmlParams.workpieceParam.lineLen);
LOG_INFO("Loaded XML params - Filter: continuityTh=%.1f, outlierTh=%.1f\n", xmlParams.filterParam.continuityTh, xmlParams.filterParam.outlierTh);
// Use the configuration structure directly
m_algorithmParams = xmlParams;
LOG_INFO("Algorithm parameters initialized successfully:\n");
LOG_INFO(" Workpiece: lineLen=%.1f\n",
m_algorithmParams.workpieceParam.lineLen);
// Print the leveling parameters of every camera (with safety checks)
LOG_INFO("Loading plane calibration parameters for all cameras:\n");
if (!m_algorithmParams.planeCalibParam.cameraCalibParams.empty()) {
for (const auto& cameraParam : m_algorithmParams.planeCalibParam.cameraCalibParams) {
try {
LOG_INFO("Camera %d (%s) calibration parameters:\n",
cameraParam.cameraIndex, cameraParam.cameraName.c_str());
LOG_INFO(" Is calibrated: %s\n", cameraParam.isCalibrated ? "YES" : "NO");
LOG_INFO(" Plane height: %.3f\n", cameraParam.planeHeight);
LOG_INFO(" Plane calibration matrix:\n");
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[0], cameraParam.planeCalib[1], cameraParam.planeCalib[2]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[3], cameraParam.planeCalib[4], cameraParam.planeCalib[5]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[6], cameraParam.planeCalib[7], cameraParam.planeCalib[8]);
LOG_INFO(" Inverse rotation matrix:\n");
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[0], cameraParam.invRMatrix[1], cameraParam.invRMatrix[2]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[3], cameraParam.invRMatrix[4], cameraParam.invRMatrix[5]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[6], cameraParam.invRMatrix[7], cameraParam.invRMatrix[8]);
LOG_INFO(" --------------------------------\n");
} catch (const std::exception& e) {
LOG_ERROR("Exception while printing camera calibration parameters: %s\n", e.what());
} catch (...) {
LOG_ERROR("Unknown exception while printing camera calibration parameters\n");
}
}
} else {
LOG_WARNING("No plane calibration parameters found in configuration\n");
}
return SUCCESS;
}
// Hand-eye calibration matrix management method
CalibMatrix WorkpiecePresenter::GetClibMatrix(int index) const
{
CalibMatrix clibMatrix;
double initClibMatrix[16] = {
1.0, 0.0, 0.0, 0.0, // row 1
0.0, 1.0, 0.0, 0.0, // row 2
0.0, 0.0, 1.0, 0.0, // row 3
0.0, 0.0, 0.0, 1.0 // row 4
};
// Default to the identity matrix; out-of-range indices fall back to it
memcpy(clibMatrix.clibMatrix, initClibMatrix, sizeof(initClibMatrix));
if (index >= 0 && index < static_cast<int>(m_clibMatrixList.size())) {
clibMatrix = m_clibMatrixList[index];
} else {
LOG_WARNING("Invalid hand-eye calibration matrix index: %d\n", index);
}
return clibMatrix;
}
void WorkpiecePresenter::CheckAndUpdateWorkStatus()
{
if (m_bCameraConnected) {
SetWorkStatus(WorkStatus::Ready);
} else {
SetWorkStatus(WorkStatus::Error);
}
}
// Implements a BasePresenter pure virtual function: run the algorithm detection
int WorkpiecePresenter::ProcessAlgoDetection(std::vector<std::pair<EVzResultDataType, SVzLaserLineData>>& detectionDataCache)
{
LOG_INFO("[Algo Thread] Start real detection task using algorithm\n");
// Get the cached point-cloud data (the base class has already verified it is non-empty)
unsigned int lineNum = detectionDataCache.size();
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
pStatus->OnStatusUpdate("扫描线数:" + std::to_string(lineNum) + ",正在算法检测...");
}
// Check that the detection presenter has been initialized
if (!m_pDetectPresenter) {
LOG_ERROR("DetectPresenter is null, cannot proceed with detection\n");
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
pStatus->OnStatusUpdate("检测处理器未初始化");
}
return ERR_CODE(DEV_NOT_FIND);
}
CVrTimeUtils oTimeUtils;
// Get the hand-eye calibration matrix currently in use
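// Note: m_currentCameraIndex is 1-based while m_clibMatrixList is 0-based, hence the -1 offset below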
const CalibMatrix currentClibMatrix = GetClibMatrix(m_currentCameraIndex - 1);
DetectionResult detectionResult;
int nRet = m_pDetectPresenter->DetectWorkpiece(m_currentCameraIndex, detectionDataCache,
m_algorithmParams, m_debugParam, m_dataLoader,
currentClibMatrix.clibMatrix, detectionResult);
// Report the detection outcome to the UI
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
QString err = QString("错误:%1").arg(nRet);
pStatus->OnStatusUpdate(QString("检测%1").arg(SUCCESS == nRet ? "成功" : err).toStdString());
}
LOG_INFO("[Algo Thread] sx_getWorkpiecePostion detected %zu objects time : %.2f ms\n", detectionResult.positions.size(), oTimeUtils.GetElapsedTimeInMilliSec());
ERR_CODE_RETURN(nRet);
// Notify the UI of the detection result
detectionResult.cameraIndex = m_currentCameraIndex;
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
pStatus->OnDetectionResult(detectionResult);
}
// Update the status message
QString statusMsg = QString("检测完成,发现%1个目标").arg(detectionResult.positions.size());
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) pStatus->OnStatusUpdate(statusMsg.toStdString());
// Send the detection result to the TCP clients
_SendDetectionResultToTCP(detectionResult, m_currentCameraIndex);
// After detection finishes, report the work status as "completed"
SetWorkStatus(WorkStatus::Completed);
// Then return to the ready state
SetWorkStatus(WorkStatus::Ready);
return SUCCESS;
}
// Implements the config-change notification interface
void WorkpiecePresenter::OnConfigChanged(const ConfigResult& configResult)
{
LOG_INFO("Configuration changed notification received, reloading algorithm parameters\n");
// Update the debug parameters
m_debugParam = configResult.debugParam;
// Re-initialize the algorithm parameters
int result = InitAlgoParams();
if (result == SUCCESS) {
LOG_INFO("Algorithm parameters reloaded successfully after config change\n");
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
pStatus->OnStatusUpdate("配置已更新,算法参数重新加载成功");
}
} else {
LOG_ERROR("Failed to reload algorithm parameters after config change, error: %d\n", result);
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
pStatus->OnStatusUpdate("配置更新后算法参数重新加载失败");
}
}
}
// Get the leveling (plane calibration) parameters for the given camera index
SSG_planeCalibPara WorkpiecePresenter::_GetCameraCalibParam(int cameraIndex)
{
// Look up the leveling parameters for the specified camera index
SSG_planeCalibPara calibParam;
// Start from the identity matrix (uncalibrated state)
double identityMatrix[9] = {1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0};
for (int i = 0; i < 9; i++) {
calibParam.planeCalib[i] = identityMatrix[i];
calibParam.invRMatrix[i] = identityMatrix[i];
}
calibParam.planeHeight = -1.0; // Use the default height
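// If no calibrated entry matching cameraIndex is found below, these identity/default values are returned unchanged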
for (const auto& cameraParam : m_algorithmParams.planeCalibParam.cameraCalibParams) {
if (cameraParam.cameraIndex == cameraIndex) {
// Use the calibration matrix or the identity matrix depending on the isCalibrated flag
if (cameraParam.isCalibrated) {
// Use the actual calibration matrix
for (int i = 0; i < 9; i++) {
calibParam.planeCalib[i] = cameraParam.planeCalib[i];
calibParam.invRMatrix[i] = cameraParam.invRMatrix[i];
}
calibParam.planeHeight = cameraParam.planeHeight;
}
}
}
return calibParam;
}
// Implements a BasePresenter pure virtual function: camera status change notification
void WorkpiecePresenter::OnCameraStatusChanged(int cameraIndex, bool isConnected)
{
LOG_INFO("Camera %d status changed: %s\n", cameraIndex, isConnected ? "connected" : "disconnected");
// Notify the UI of the camera status change
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
if (cameraIndex == 1) {
pStatus->OnCamera1StatusChanged(isConnected);
} else if (cameraIndex == 2) {
pStatus->OnCamera2StatusChanged(isConnected);
}
// Get the camera name for the status message
QString cameraName;
int arrayIndex = cameraIndex - 1;
if (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) {
cameraName = QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first);
} else {
cameraName = QString("相机%1").arg(cameraIndex);
}
QString statusMsg = QString("%1%2").arg(cameraName).arg(isConnected ? "已连接" : "已断开");
pStatus->OnStatusUpdate(statusMsg.toStdString());
}
// Check and update the overall work status
CheckAndUpdateWorkStatus();
}
// Implements a BasePresenter virtual function: work status change notification
void WorkpiecePresenter::OnWorkStatusChanged(WorkStatus status)
{
auto pStatus = GetStatusCallback<IYWorkpieceStatus>();
if (pStatus) {
pStatus->OnWorkStatusChanged(status);
}
}
// Implements a BasePresenter virtual function: camera count change notification
void WorkpiecePresenter::OnCameraCountChanged(int count)
{
auto pStatus = GetStatusCallback<IYWorkpieceStatus>();
if (pStatus) {
pStatus->OnCameraCountChanged(count);
}
}
// Implements a BasePresenter virtual function: status text update notification
void WorkpiecePresenter::OnStatusUpdate(const std::string& statusMessage)
{
auto pStatus = GetStatusCallback<IYWorkpieceStatus>();
if (pStatus) {
pStatus->OnStatusUpdate(statusMessage);
}
}
// ============ ICameraLevelCalculator interface implementation ============
bool WorkpiecePresenter::CalculatePlaneCalibration(
const std::vector<std::pair<EVzResultDataType, SVzLaserLineData>>& scanData,
double planeCalib[9],
double& planeHeight,
double invRMatrix[9])
{
try {
// Check whether there is enough scan data
if (scanData.empty()) {
LOG_ERROR("No scan data available for plane calibration\n");
return false;
}
LOG_INFO("Calculating plane calibration from %zu scan lines\n", scanData.size());
// Convert to the XYZ format required by the algorithm
LaserDataLoader dataLoader;
std::vector<std::vector<SVzNL3DPosition>> xyzData;
int convertResult = dataLoader.ConvertToSVzNL3DPosition(scanData, xyzData);
if (convertResult != SUCCESS || xyzData.empty()) {
LOG_WARNING("Failed to convert data to XYZ format or no XYZ data available\n");
return false;
}
// Run the workpiece project's leveling algorithm
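// sx_BQ_getBaseCalibPara comes from BQ_workpieceCornerExtraction_Export.h (the leveling algorithm interface included at the top of this file)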
SSG_planeCalibPara calibResult = sx_BQ_getBaseCalibPara(xyzData);
// Copy the data from the result structure into the output parameters
for (int i = 0; i < 9; i++) {
planeCalib[i] = calibResult.planeCalib[i];
invRMatrix[i] = calibResult.invRMatrix[i];
}
planeHeight = calibResult.planeHeight;
LOG_INFO("Plane calibration calculated successfully: height=%.3f\n", planeHeight);
return true;
} catch (const std::exception& e) {
LOG_ERROR("Exception in CalculatePlaneCalibration: %s\n", e.what());
return false;
} catch (...) {
LOG_ERROR("Unknown exception in CalculatePlaneCalibration\n");
return false;
}
}
// ============ ICameraLevelResultSaver interface implementation ============
bool WorkpiecePresenter::SaveLevelingResults(double planeCalib[9], double planeHeight, double invRMatrix[9],
int cameraIndex, const QString& cameraName)
{
try {
if (!m_vrConfig) {
LOG_ERROR("Config is null, cannot save leveling results\n");
return false;
}
// Validate the camera parameters passed in
if (cameraIndex <= 0) {
LOG_ERROR("Invalid camera index: %d\n", cameraIndex);
return false;
}
if (cameraName.isEmpty()) {
LOG_ERROR("Camera name is empty\n");
return false;
}
// Load the current configuration
QString configPath = PathManager::GetInstance().GetConfigFilePath();
LOG_INFO("Config path: %s\n", configPath.toUtf8().constData());
ConfigResult configResult;
int ret = m_vrConfig->LoadConfig(configPath.toStdString(), configResult);
if (ret != LOAD_CONFIG_SUCCESS) {
LOG_ERROR("Failed to load config file, error code: %d\n", ret);
return false;
}
// Create or update the leveling parameters for the specified camera
VrCameraPlaneCalibParam cameraParam;
cameraParam.cameraIndex = cameraIndex;
cameraParam.cameraName = cameraName.toStdString();
cameraParam.planeHeight = planeHeight;
cameraParam.isCalibrated = true;
// Copy the calibration matrices
for (int i = 0; i < 9; i++) {
cameraParam.planeCalib[i] = planeCalib[i];
cameraParam.invRMatrix[i] = invRMatrix[i];
}
// Update the camera calibration parameters in the configuration
configResult.algorithmParams.planeCalibParam.SetCameraCalibParam(cameraParam);
// Save the configuration
bool saveResult = m_vrConfig->SaveConfig(configPath.toStdString(), configResult);
if (!saveResult) {
LOG_ERROR("Failed to save config with leveling results\n");
return false;
}
LOG_INFO("Leveling results saved successfully for camera %d (%s)\n", cameraIndex, cameraName.toUtf8().constData());
LOG_INFO("Plane height: %.3f\n", planeHeight);
LOG_INFO("Calibration marked as completed\n");
return true;
} catch (const std::exception& e) {
LOG_ERROR("Exception in SaveLevelingResults: %s\n", e.what());
return false;
}
}
bool WorkpiecePresenter::LoadLevelingResults(int cameraIndex, const QString& cameraName,
double planeCalib[9], double& planeHeight, double invRMatrix[9])
{
try {
if (!m_vrConfig) {
LOG_ERROR("Config is null, cannot load calibration data\n");
return false;
}
// Load the configuration file
QString configPath = PathManager::GetInstance().GetConfigFilePath();
ConfigResult configResult;
int ret = m_vrConfig->LoadConfig(configPath.toStdString(), configResult);
if (ret != LOAD_CONFIG_SUCCESS) {
LOG_ERROR("Failed to load config file, error code: %d\n", ret);
return false;
}
// Get the calibration parameters of the specified camera
VrCameraPlaneCalibParam cameraParamValue;
if (!configResult.algorithmParams.planeCalibParam.GetCameraCalibParam(cameraIndex, cameraParamValue) || !cameraParamValue.isCalibrated) {
LOG_INFO("No calibration data found for camera %d (%s)\n", cameraIndex, cameraName.toUtf8().constData());
return false;
}
// Copy the calibration data
for (int i = 0; i < 9; i++) {
planeCalib[i] = cameraParamValue.planeCalib[i];
invRMatrix[i] = cameraParamValue.invRMatrix[i];
}
planeHeight = cameraParamValue.planeHeight;
LOG_INFO("Calibration data loaded successfully for camera %d (%s)\n", cameraIndex, cameraName.toUtf8().constData());
LOG_INFO("Plane height: %.3f\n", planeHeight);
return true;
} catch (const std::exception& e) {
LOG_ERROR("Exception in LoadLevelingResults: %s\n", e.what());
return false;
}
}
// ============ IConfigCommandHandler interface implementation ============
bool WorkpiecePresenter::OnCameraExposeCommand(const CameraConfigParam& param)
{
LOG_INFO("Applying camera expose setting: camera %d, expose time: %.2f\n",
param.cameraIndex, param.exposeTime);
if (param.cameraIndex == -1) {
// Apply to all cameras
for (size_t i = 0; i < m_vrEyeDeviceList.size(); i++) {
IVrEyeDevice* device = m_vrEyeDeviceList[i].second;
if (device && param.exposeTime > 0) {
unsigned int exposeTime = static_cast<unsigned int>(param.exposeTime);
device->SetEyeExpose(exposeTime);
}
}
} else {
// Apply to the specified camera
int arrayIndex = param.cameraIndex - 1;
if (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* device = m_vrEyeDeviceList[arrayIndex].second;
if (device && param.exposeTime > 0) {
unsigned int exposeTime = static_cast<unsigned int>(param.exposeTime);
device->SetEyeExpose(exposeTime);
}
}
}
return true;
}
bool WorkpiecePresenter::OnCameraGainCommand(const CameraConfigParam& param)
{
LOG_INFO("Applying camera gain setting: camera %d, gain: %.2f\n",
param.cameraIndex, param.gain);
if (param.cameraIndex == -1) {
// Apply to all cameras
for (size_t i = 0; i < m_vrEyeDeviceList.size(); i++) {
IVrEyeDevice* device = m_vrEyeDeviceList[i].second;
if (device && param.gain > 0) {
unsigned int gain = static_cast<unsigned int>(param.gain);
device->SetEyeGain(gain);
}
}
} else {
// Apply to the specified camera
int arrayIndex = param.cameraIndex - 1;
if (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* device = m_vrEyeDeviceList[arrayIndex].second;
if (device && param.gain > 0) {
unsigned int gain = static_cast<unsigned int>(param.gain);
device->SetEyeGain(gain);
}
}
}
return true;
}
bool WorkpiecePresenter::OnCameraFrameRateCommand(const CameraConfigParam& param)
{
LOG_INFO("Applying camera frame rate setting: camera %d, frame rate: %.2f\n",
param.cameraIndex, param.frameRate);
if (param.cameraIndex == -1) {
// Apply to all cameras
for (size_t i = 0; i < m_vrEyeDeviceList.size(); i++) {
IVrEyeDevice* device = m_vrEyeDeviceList[i].second;
if (device && param.frameRate > 0) {
int frameRate = static_cast<int>(param.frameRate);
device->SetFrame(frameRate);
}
}
} else {
// Apply to the specified camera
int arrayIndex = param.cameraIndex - 1;
if (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* device = m_vrEyeDeviceList[arrayIndex].second;
if (device && param.frameRate > 0) {
int frameRate = static_cast<int>(param.frameRate);
device->SetFrame(frameRate);
}
}
}
return true;
}
bool WorkpiecePresenter::OnCameraSwingCommand(const SwingConfigParam& param)
{
LOG_INFO("Applying camera swing setting: camera %d, speed: %.2f, angles: %.2f-%.2f\n",
param.cameraIndex, param.swingSpeed, param.startAngle, param.stopAngle);
if (param.cameraIndex == -1) {
// Apply to all cameras
for (size_t i = 0; i < m_vrEyeDeviceList.size(); i++) {
IVrEyeDevice* device = m_vrEyeDeviceList[i].second;
if (device) {
if (param.swingSpeed > 0) {
float swingSpeed = static_cast<float>(param.swingSpeed);
device->SetSwingSpeed(swingSpeed);
}
if (param.startAngle != param.stopAngle) {
float startAngle = static_cast<float>(param.startAngle);
float stopAngle = static_cast<float>(param.stopAngle);
device->SetSwingAngle(startAngle, stopAngle);
}
}
}
} else {
// Apply to the specified camera
int arrayIndex = param.cameraIndex - 1;
if (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* device = m_vrEyeDeviceList[arrayIndex].second;
if (device) {
if (param.swingSpeed > 0) {
float swingSpeed = static_cast<float>(param.swingSpeed);
device->SetSwingSpeed(swingSpeed);
}
if (param.startAngle != param.stopAngle) {
float startAngle = static_cast<float>(param.startAngle);
float stopAngle = static_cast<float>(param.stopAngle);
device->SetSwingAngle(startAngle, stopAngle);
}
}
}
}
return true;
}
bool WorkpiecePresenter::OnAlgoParamCommand(const AlgoConfigParam& param)
{
LOG_INFO("Applying algorithm parameter: %s = %.3f\n", param.paramName, param.paramValue);
// Update the algorithm parameters (the concrete implementation depends on the algorithm library interface)
// This is only a placeholder; the real handler must update the matching parameter based on paramName
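// A minimal sketch (not the author's confirmed mapping) of what a paramName dispatch could look
// like; the field names below (workpieceParam.lineLen, filterParam.continuityTh,
// filterParam.outlierTh) are taken from the logging in InitAlgoParams, but the paramName strings
// are assumptions:
//
//   const std::string name(param.paramName);
//   if (name == "lineLen") {
//       m_algorithmParams.workpieceParam.lineLen = param.paramValue;
//   } else if (name == "continuityTh") {
//       m_algorithmParams.filterParam.continuityTh = param.paramValue;
//   } else if (name == "outlierTh") {
//       m_algorithmParams.filterParam.outlierTh = param.paramValue;
//   } else {
//       LOG_WARNING("Unknown algorithm parameter\n");
//   }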
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
pStatus->OnStatusUpdate("算法参数已更新");
}
return true;
}
bool WorkpiecePresenter::OnCalibParamCommand(const CalibConfigParam& param)
{
LOG_INFO("Applying calibration parameter for camera %d\n", param.cameraIndex);
// Update the calibration parameters (the concrete implementation depends on the calibration data structure)
// This is only a placeholder
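// One plausible approach, given the helpers already in this file, would be to re-run
// InitAlgoParams() so that m_clibMatrixList and the plane-calibration parameters are reloaded
// from the updated configuration; this is only a suggestion, not confirmed behavior of the original code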
return true;
}
bool WorkpiecePresenter::OnFullConfigCommand(const FullConfigParam& param)
{
LOG_INFO("Applying full configuration update\n");
// Reload the full configuration
QString configPath = PathManager::GetInstance().GetConfigFilePath();
if (m_vrConfig) {
int ret = m_vrConfig->LoadConfig(configPath.toStdString(), m_configResult);
if (ret == LOAD_CONFIG_SUCCESS) {
// Re-initialize the algorithm parameters
InitAlgoParams();
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
pStatus->OnStatusUpdate("配置已重新加载");
}
return true;
}
}
return false;
}