// GrabBag/App/Workpiece/WorkpieceApp/Presenter/Src/WorkpiecePresenter.cpp

#include "WorkpiecePresenter.h"
#include "VrError.h"
#include "VrLog.h"
#include <QtCore/QCoreApplication>
#include <QtCore/QFileInfo>
#include <QtCore/QDir>
#include <QtCore/QString>
#include <QtCore/QStandardPaths>
#include <QtCore/QFile>
#include <QtCore/QDateTime>
#include <cmath>
#include <algorithm>
#include <QImage>
#include <QThread>
#include <atomic>
#include <QJsonObject>
#include <QJsonArray>
#include "Version.h"
#include "VrTimeUtils.h"
#include "VrDateUtils.h"
#include "SG_baseDataType.h"
#include "VrConvert.h"
#include "TCPServerProtocol.h"
#include "DetectPresenter.h"
#include "PathManager.h"
WorkpiecePresenter::WorkpiecePresenter(QObject *parent)
: BasePresenter(parent)
, m_vrConfig(nullptr)
, m_pDetectPresenter(nullptr)
, m_pTCPServer(nullptr)
, m_bTCPConnected(false)
{
    // The base class has already created the camera reconnect timer and the detection data cache
}
WorkpiecePresenter::~WorkpiecePresenter()
{
    // The base class automatically handles: camera reconnect timer, algorithm detection thread,
    // detection data cache, and camera device resources
    // Shut down the configuration manager
if (m_pConfigManager) {
m_pConfigManager->Shutdown();
m_pConfigManager.reset();
}
    // Release the TCP server
if (m_pTCPServer) {
m_pTCPServer->Deinitialize();
delete m_pTCPServer;
m_pTCPServer = nullptr;
}
    // Release the detection presenter
if(m_pDetectPresenter)
{
delete m_pDetectPresenter;
m_pDetectPresenter = nullptr;
}
    // Release the configuration object
if (m_vrConfig) {
delete m_vrConfig;
m_vrConfig = nullptr;
}
}
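/*
 * Initializes the application: configuration manager, cameras, and TCP server.
 * A minimal usage sketch (hypothetical caller, not part of this file):
 *
 *     WorkpiecePresenter presenter;
 *     if (presenter.InitApp() != SUCCESS) {
 *         // initialization failed; details are reported through the status callback
 *     }
 */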
int WorkpiecePresenter::InitApp()
{
LOG_DEBUG("Start APP Version: %s\n", WORKPIECE_FULL_VERSION_STRING);
    // Initialize the connection status
SetWorkStatus(WorkStatus::InitIng);
m_pDetectPresenter = new DetectPresenter();
    // Get the configuration file path
QString configPath = PathManager::GetInstance().GetConfigFilePath();
int nRet = SUCCESS;
    // Initialize the configuration manager (ConfigManager internally creates the IVrConfig instance)
m_pConfigManager = std::make_unique<ConfigManager>();
if (!m_pConfigManager->Initialize(configPath.toStdString())) {
LOG_ERROR("Failed to initialize ConfigManager\n");
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) pStatus->OnStatusUpdate("配置管理器初始化失败");
return ERR_CODE(DEV_OPEN_ERR);
}
LOG_INFO("ConfigManager initialized successfully\n");
    // Get the configuration result from ConfigManager
ConfigResult configResult = m_pConfigManager->GetConfigResult();
    // Get the IVrConfig instance used for configuration change notifications
m_vrConfig = m_pConfigManager->GetVrConfigInstance();
    // Set the configuration change notification callback
if (m_vrConfig) {
m_vrConfig->SetConfigChangeNotify(this);
}
    // Call the base class InitCamera to initialize the cameras (bRGB=false, bSwing=true)
InitCamera(configResult.cameraList, false, true);
LOG_INFO("Camera initialization completed. Connected cameras: %zu, default camera index: %d\n",
m_vrEyeDeviceList.size(), m_currentCameraIndex);
    // Initialize the TCP server
nRet = InitTCPServer();
if (nRet != 0) {
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) pStatus->OnStatusUpdate("TCP服务器初始化失败");
m_bTCPConnected = false;
} else {
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) pStatus->OnStatusUpdate("TCP服务器初始化成功");
}
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) pStatus->OnStatusUpdate("设备初始化完成");
CheckAndUpdateWorkStatus();
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) pStatus->OnStatusUpdate("配置初始化成功");
return SUCCESS;
}
// Initialize algorithm parameters (implements a BasePresenter pure virtual function)
int WorkpiecePresenter::InitAlgoParams()
{
LOG_DEBUG("initializing algorithm parameters\n");
QString exePath = QCoreApplication::applicationFilePath();
    // Clear the existing hand-eye calibration matrix list
m_clibMatrixList.clear();
    // Get the hand-eye calibration file path and make sure the file exists
QString clibPath = PathManager::GetInstance().GetCalibrationFilePath();
LOG_INFO("Loading hand-eye matrices from: %s\n", clibPath.toStdString().c_str());
    // Read the number of existing matrices
int nExistMatrixNum = CVrConvert::GetClibMatrixCount(clibPath.toStdString().c_str());
LOG_INFO("Found %d hand-eye calibration matrices\n", nExistMatrixNum);
    // Load each matrix
for(int matrixIndex = 0; matrixIndex < nExistMatrixNum; matrixIndex++)
{
        // Build the matrix identifier
char matrixIdent[64];
#ifdef _WIN32
sprintf_s(matrixIdent, "CalibMatrixInfo_%d", matrixIndex);
#else
sprintf(matrixIdent, "CalibMatrixInfo_%d", matrixIndex);
#endif
        // Create a new calibration matrix structure
CalibMatrix calibMatrix;
        // Initialize to the identity matrix
        double initClibMatrix[16] = {
            1.0, 0.0, 0.0, 0.0, // row 1
            0.0, 1.0, 0.0, 0.0, // row 2
            0.0, 0.0, 1.0, 0.0, // row 3
            0.0, 0.0, 0.0, 1.0  // row 4
        };
        // Load the matrix data
bool loadSuccess = CVrConvert::LoadClibMatrix(clibPath.toStdString().c_str(), matrixIdent, "dCalibMatrix", calibMatrix.clibMatrix);
if(loadSuccess)
{
m_clibMatrixList.push_back(calibMatrix);
LOG_INFO("Successfully loaded matrix %d\n", matrixIndex);
            // Print the matrix content
QString clibMatrixStr;
LOG_INFO("Matrix %d content:\n", matrixIndex);
for (int i = 0; i < 4; ++i) {
clibMatrixStr.clear();
for (int j = 0; j < 4; ++j) {
clibMatrixStr += QString::asprintf("%8.4f ", calibMatrix.clibMatrix[i * 4 + j]);
}
LOG_INFO(" %s\n", clibMatrixStr.toStdString().c_str());
}
}
else
{
LOG_WARNING("Failed to load matrix %d, using identity matrix\n", matrixIndex);
            // If loading failed, fall back to the identity matrix
memcpy(calibMatrix.clibMatrix, initClibMatrix, sizeof(initClibMatrix));
m_clibMatrixList.push_back(calibMatrix);
}
}
LOG_INFO("Total loaded %zu hand-eye calibration matrices\n", m_clibMatrixList.size());
    // Get the already-loaded configuration from ConfigManager (avoid loading it twice)
ConfigResult configResult = m_pConfigManager->GetConfigResult();
    // If the ConfigManager result is empty and m_vrConfig is available, try loading the config file directly
if (configResult.cameraList.empty() && m_vrConfig) {
LOG_WARNING("ConfigResult from ConfigManager is empty, trying direct load\n");
QString configPath = PathManager::GetInstance().GetConfigFilePath();
LOG_INFO("Loading config: %s\n", configPath.toStdString().c_str());
int ret = m_vrConfig->LoadConfig(configPath.toStdString(), configResult);
if (ret != LOAD_CONFIG_SUCCESS) {
LOG_ERROR("Failed to load config file directly, error code: %d\n", ret);
}
}
const VrAlgorithmParams& xmlParams = configResult.algorithmParams;
    // Save the debug parameters
m_debugParam = configResult.debugParam;
LOG_INFO("Loaded XML params - Workpiece: lineLen=%.1f\n", xmlParams.workpieceParam.lineLen);
LOG_INFO("Loaded XML params - Filter: continuityTh=%.1f, outlierTh=%.1f\n", xmlParams.filterParam.continuityTh, xmlParams.filterParam.outlierTh);
    // Use the configuration structure directly
m_algorithmParams = xmlParams;
LOG_INFO("Algorithm parameters initialized successfully:\n");
LOG_INFO(" Workpiece: lineLen=%.1f\n",
m_algorithmParams.workpieceParam.lineLen);
    // Print the plane calibration (leveling) parameters for all cameras (with safety checks)
LOG_INFO("Loading plane calibration parameters for all cameras:\n");
if (!m_algorithmParams.planeCalibParam.cameraCalibParams.empty()) {
for (const auto& cameraParam : m_algorithmParams.planeCalibParam.cameraCalibParams) {
try {
LOG_INFO("Camera %d (%s) calibration parameters:\n",
cameraParam.cameraIndex, cameraParam.cameraName.c_str());
LOG_INFO(" Is calibrated: %s\n", cameraParam.isCalibrated ? "YES" : "NO");
LOG_INFO(" Plane height: %.3f\n", cameraParam.planeHeight);
LOG_INFO(" Plane calibration matrix:\n");
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[0], cameraParam.planeCalib[1], cameraParam.planeCalib[2]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[3], cameraParam.planeCalib[4], cameraParam.planeCalib[5]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.planeCalib[6], cameraParam.planeCalib[7], cameraParam.planeCalib[8]);
LOG_INFO(" Inverse rotation matrix:\n");
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[0], cameraParam.invRMatrix[1], cameraParam.invRMatrix[2]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[3], cameraParam.invRMatrix[4], cameraParam.invRMatrix[5]);
LOG_INFO(" [%.3f, %.3f, %.3f]\n", cameraParam.invRMatrix[6], cameraParam.invRMatrix[7], cameraParam.invRMatrix[8]);
LOG_INFO(" --------------------------------\n");
} catch (const std::exception& e) {
LOG_ERROR("Exception while printing camera calibration parameters: %s\n", e.what());
} catch (...) {
LOG_ERROR("Unknown exception while printing camera calibration parameters\n");
}
}
} else {
LOG_WARNING("No plane calibration parameters found in configuration\n");
}
return SUCCESS;
}
// Hand-eye calibration matrix management
CalibMatrix WorkpiecePresenter::GetClibMatrix(int index) const
{
CalibMatrix clibMatrix;
    double initClibMatrix[16] = {
        1.0, 0.0, 0.0, 0.0, // row 1
        0.0, 1.0, 0.0, 0.0, // row 2
        0.0, 0.0, 1.0, 0.0, // row 3
        0.0, 0.0, 0.0, 1.0  // row 4
    };
memcpy(clibMatrix.clibMatrix, initClibMatrix, sizeof(initClibMatrix));
    if (index >= 0 && index < static_cast<int>(m_clibMatrixList.size())) {
        clibMatrix = m_clibMatrixList[index];
    } else {
        LOG_WARNING("Invalid hand-eye calibration matrix index: %d, returning identity matrix\n", index);
}
return clibMatrix;
}
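// Update the overall work status based on the camera connection state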
void WorkpiecePresenter::CheckAndUpdateWorkStatus()
{
if (m_bCameraConnected) {
SetWorkStatus(WorkStatus::Ready);
} else {
SetWorkStatus(WorkStatus::Error);
}
}
// Implements a BasePresenter pure virtual function: run the algorithm detection
int WorkpiecePresenter::ProcessAlgoDetection(std::vector<std::pair<EVzResultDataType, SVzLaserLineData>>& detectionDataCache)
{
LOG_INFO("[Algo Thread] Start real detection task using algorithm\n");
    // 1. Get the cached point cloud data (already verified as non-empty by the base class)
unsigned int lineNum = detectionDataCache.size();
    if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
        pStatus->OnStatusUpdate("扫描线数:" + std::to_string(lineNum) + ",正在算法检测...");
    }
    // Check whether the detection presenter has been initialized
if (!m_pDetectPresenter) {
LOG_ERROR("DetectPresenter is null, cannot proceed with detection\n");
        if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
            pStatus->OnStatusUpdate("检测处理器未初始化");
        }
return ERR_CODE(DEV_NOT_FIND);
}
CVrTimeUtils oTimeUtils;
    // Get the hand-eye calibration matrix currently in use (camera indices are 1-based; the matrix list is 0-based)
const CalibMatrix currentClibMatrix = GetClibMatrix(m_currentCameraIndex - 1);
DetectionResult detectionResult;
int nRet = m_pDetectPresenter->DetectWorkpiece(m_currentCameraIndex, detectionDataCache,
m_algorithmParams, m_debugParam, m_dataLoader,
currentClibMatrix.clibMatrix, detectionResult);
    // Choose the handling according to the project type
    if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
        QString err = QString("错误:%1").arg(nRet);
        pStatus->OnStatusUpdate(QString("检测%1").arg(SUCCESS == nRet ? "成功" : err).toStdString());
    }
LOG_INFO("[Algo Thread] sx_getWorkpiecePostion detected %zu objects time : %.2f ms\n", detectionResult.positions.size(), oTimeUtils.GetElapsedTimeInMilliSec());
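    // Assumption: ERR_CODE_RETURN returns early when nRet indicates failure (see VrError.h),
    // so the notification steps below only run after a successful detection.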
ERR_CODE_RETURN(nRet);
    // 8. Notify the UI of the detection result
detectionResult.cameraIndex = m_currentCameraIndex;
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
pStatus->OnDetectionResult(detectionResult);
}
    // Update the status
QString statusMsg = QString("检测完成,发现%1个目标").arg(detectionResult.positions.size());
if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) pStatus->OnStatusUpdate(statusMsg.toStdString());
    // Send the detection result to the TCP client
_SendDetectionResultToTCP(detectionResult, m_currentCameraIndex);
    // 9. After detection completes, set the work status to Completed
SetWorkStatus(WorkStatus::Completed);
    // Then restore the Ready state
SetWorkStatus(WorkStatus::Ready);
return SUCCESS;
}
// Implements the configuration change notification interface
void WorkpiecePresenter::OnConfigChanged(const ConfigResult& configResult)
{
LOG_INFO("Configuration changed notification received, reloading algorithm parameters\n");
    // Update the debug parameters
m_debugParam = configResult.debugParam;
    // Re-initialize the algorithm parameters
int result = InitAlgoParams();
if (result == SUCCESS) {
LOG_INFO("Algorithm parameters reloaded successfully after config change\n");
        if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
            pStatus->OnStatusUpdate("配置已更新,算法参数重新加载成功");
        }
} else {
LOG_ERROR("Failed to reload algorithm parameters after config change, error: %d\n", result);
        if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
            pStatus->OnStatusUpdate("配置更新后算法参数重新加载失败");
        }
}
}
// Get the plane calibration (leveling) parameters for the given camera index
SSG_planeCalibPara WorkpiecePresenter::_GetCameraCalibParam(int cameraIndex)
{
    // Look up the plane calibration parameters for the specified camera index
SSG_planeCalibPara calibParam;
    // Default to the identity matrix (uncalibrated state)
double identityMatrix[9] = {1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0};
for (int i = 0; i < 9; i++) {
calibParam.planeCalib[i] = identityMatrix[i];
calibParam.invRMatrix[i] = identityMatrix[i];
}
    calibParam.planeHeight = -1.0; // Use the default height
for (const auto& cameraParam : m_algorithmParams.planeCalibParam.cameraCalibParams) {
if (cameraParam.cameraIndex == cameraIndex) {
            // Decide between the calibration matrix and the identity matrix based on the isCalibrated flag
if (cameraParam.isCalibrated) {
                // Use the actual calibration matrix
for (int i = 0; i < 9; i++) {
calibParam.planeCalib[i] = cameraParam.planeCalib[i];
calibParam.invRMatrix[i] = cameraParam.invRMatrix[i];
}
calibParam.planeHeight = cameraParam.planeHeight;
}
}
}
return calibParam;
}
// Implements the IConfigChangeListener interface
void WorkpiecePresenter::OnSystemConfigChanged(const SystemConfig& config)
{
LOG_INFO("System configuration changed, applying new configuration\n");
    // Update the internal configuration state
if (m_vrConfig) {
        // Optionally re-initialize the algorithm parameters
InitAlgoParams();
}
    if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
        pStatus->OnStatusUpdate("系统配置已更新");
    }
}
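// Camera parameter change notification: applies the new UI parameters (exposure, gain,
// frame rate, swing speed/angle) to the corresponding device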
void WorkpiecePresenter::OnCameraParamChanged(int cameraIndex, const CameraUIParam& cameraParam)
{
LOG_INFO("Camera %d parameters changed: expose=%.2f, gain=%.2f, frameRate=%.2f\n",
cameraIndex, cameraParam.exposeTime, cameraParam.gain, cameraParam.frameRate);
    // Apply the camera parameters to the actual device
if (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) {
IVrEyeDevice* device = m_vrEyeDeviceList[cameraIndex - 1].second;
if (device) {
            // Set the exposure time
if (cameraParam.exposeTime > 0) {
unsigned int exposeTime = static_cast<unsigned int>(cameraParam.exposeTime);
int ret = device->SetEyeExpose(exposeTime);
if (ret == SUCCESS) {
LOG_INFO("Set expose time %.2f for camera %d successfully\n", cameraParam.exposeTime, cameraIndex);
} else {
LOG_ERROR("Failed to set expose time for camera %d, error: %d\n", cameraIndex, ret);
}
}
            // Set the gain
if (cameraParam.gain > 0) {
unsigned int gain = static_cast<unsigned int>(cameraParam.gain);
int ret = device->SetEyeGain(gain);
if (ret == SUCCESS) {
LOG_INFO("Set gain %.2f for camera %d successfully\n", cameraParam.gain, cameraIndex);
} else {
LOG_ERROR("Failed to set gain for camera %d, error: %d\n", cameraIndex, ret);
}
}
            // Set the frame rate
if (cameraParam.frameRate > 0) {
int frameRate = static_cast<int>(cameraParam.frameRate);
int ret = device->SetFrame(frameRate);
if (ret == SUCCESS) {
LOG_INFO("Set frame rate %.2f for camera %d successfully\n", cameraParam.frameRate, cameraIndex);
} else {
LOG_ERROR("Failed to set frame rate for camera %d, error: %d\n", cameraIndex, ret);
}
}
            // Set the swing speed
if (cameraParam.swingSpeed > 0) {
float swingSpeed = static_cast<float>(cameraParam.swingSpeed);
int ret = device->SetSwingSpeed(swingSpeed);
if (ret == SUCCESS) {
LOG_INFO("Set swing speed %.2f for camera %d successfully\n", cameraParam.swingSpeed, cameraIndex);
} else {
LOG_ERROR("Failed to set swing speed for camera %d, error: %d\n", cameraIndex, ret);
}
}
            // Set the swing angle range
if (cameraParam.swingStartAngle != cameraParam.swingStopAngle) {
float startAngle = static_cast<float>(cameraParam.swingStartAngle);
float stopAngle = static_cast<float>(cameraParam.swingStopAngle);
int ret = device->SetSwingAngle(startAngle, stopAngle);
if (ret == SUCCESS) {
LOG_INFO("Set swing angle %.2f-%.2f for camera %d successfully\n",
cameraParam.swingStartAngle, cameraParam.swingStopAngle, cameraIndex);
} else {
LOG_ERROR("Failed to set swing angle for camera %d, error: %d\n", cameraIndex, ret);
}
}
}
}
    if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
        QString cameraName = (cameraIndex >= 1 && cameraIndex <= static_cast<int>(m_vrEyeDeviceList.size())) ?
            QString::fromStdString(m_vrEyeDeviceList[cameraIndex - 1].first) : QString("相机%1").arg(cameraIndex);
        pStatus->OnStatusUpdate(QString("%1参数已更新").arg(cameraName).toStdString());
    }
}
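// Algorithm parameter change notification: updates the cached algorithm parameters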
void WorkpiecePresenter::OnAlgorithmParamChanged(const VrAlgorithmParams& algorithmParams)
{
LOG_INFO("Algorithm parameters changed, updating internal configuration\n");
    // Update the algorithm parameters directly
m_algorithmParams = algorithmParams;
LOG_INFO("Updated algorithm parameters:\n");
LOG_INFO(" Workpiece: lineLen=%.1f\n",
m_algorithmParams.workpieceParam.lineLen);
LOG_INFO(" TreeGrow: yDeviation_max=%.1f, zDeviation_max=%.1f, maxLineSkipNum=%d\n",
m_algorithmParams.growParam.yDeviation_max, m_algorithmParams.growParam.zDeviation_max, m_algorithmParams.growParam.maxLineSkipNum);
LOG_INFO(" Corner: cornerTh=%.3f, jumpCornerTh_1=%.3f, jumpCornerTh_2=%.3f\n",
m_algorithmParams.cornerParam.cornerTh, m_algorithmParams.cornerParam.jumpCornerTh_1, m_algorithmParams.cornerParam.jumpCornerTh_2);
    if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
        pStatus->OnStatusUpdate("算法参数已更新");
    }
}
// Implements a BasePresenter pure virtual function: camera status change notification
void WorkpiecePresenter::OnCameraStatusChanged(int cameraIndex, bool isConnected)
{
LOG_INFO("Camera %d status changed: %s\n", cameraIndex, isConnected ? "connected" : "disconnected");
    // Notify the UI to update the camera status
    if (auto pStatus = GetStatusCallback<IYWorkpieceStatus>()) {
        if (cameraIndex == 1) {
            pStatus->OnCamera1StatusChanged(isConnected);
        } else if (cameraIndex == 2) {
            pStatus->OnCamera2StatusChanged(isConnected);
        }
        // Get the camera name for the status message
        QString cameraName;
        int arrayIndex = cameraIndex - 1;
        if (arrayIndex >= 0 && arrayIndex < static_cast<int>(m_vrEyeDeviceList.size())) {
            cameraName = QString::fromStdString(m_vrEyeDeviceList[arrayIndex].first);
        } else {
            cameraName = QString("相机%1").arg(cameraIndex);
        }
        QString statusMsg = QString("%1%2").arg(cameraName).arg(isConnected ? "已连接" : "已断开");
        pStatus->OnStatusUpdate(statusMsg.toStdString());
    }
    // Check and update the work status
CheckAndUpdateWorkStatus();
}
// Implements a BasePresenter virtual function: work status change notification
void WorkpiecePresenter::OnWorkStatusChanged(WorkStatus status)
{
auto pStatus = GetStatusCallback<IYWorkpieceStatus>();
if (pStatus) {
pStatus->OnWorkStatusChanged(status);
}
}
// Implements a BasePresenter virtual function: camera count change notification
void WorkpiecePresenter::OnCameraCountChanged(int count)
{
auto pStatus = GetStatusCallback<IYWorkpieceStatus>();
if (pStatus) {
pStatus->OnCameraCountChanged(count);
}
}
// Implements a BasePresenter virtual function: status text update notification
void WorkpiecePresenter::OnStatusUpdate(const std::string& statusMessage)
{
auto pStatus = GetStatusCallback<IYWorkpieceStatus>();
if (pStatus) {
pStatus->OnStatusUpdate(statusMessage);
}
}