#include "BinocularMarkPresenter.h" #include "VrLog.h" #include "VrError.h" #include #include #include #include #include #include #include #include #include #include #include "VrTimeUtils.h" #include "PathManager.h" BinocularMarkPresenter::BinocularMarkPresenter(QObject *parent) : QObject(parent) , m_pLeftCamera(nullptr) , m_pRightCamera(nullptr) , m_pCameraReconnectTimer(nullptr) , m_bCameraConnected(false) , m_bAutoStartDetection(false) , m_bIsDetecting(false) , m_bSendContinuousResult(false) , m_bSendContinuousImage(false) , m_bIsProcessingImage(false) , m_bLeftImageReady(false) , m_bRightImageReady(false) , m_nServerPort(5901) , m_nLeftCameraIndex(0) , m_nRightCameraIndex(1) , m_fExposureTime(10000.0) , m_fGain(1.0) , m_disparityOffset(0.0) { // 初始化GalaxyImageData m_leftImageData.pData = nullptr; m_leftImageData.width = 0; m_leftImageData.height = 0; m_leftImageData.dataSize = 0; m_leftImageData.pixelFormat = 0; m_leftImageData.frameID = 0; m_leftImageData.timestamp = 0; m_rightImageData.pData = nullptr; m_rightImageData.width = 0; m_rightImageData.height = 0; m_rightImageData.dataSize = 0; m_rightImageData.pixelFormat = 0; m_rightImageData.frameID = 0; m_rightImageData.timestamp = 0; // 初始化Q矩阵为单位矩阵 4x4 m_Q = cv::Mat::eye(4, 4, CV_64F); // 初始化Mark配置(默认值) m_markInfo.patternSize = cv::Size(3, 3); // 3x3 mark m_markInfo.checkerSize = 60.0f; // 60mm m_markInfo.markerSize = 45.0f; // 45mm m_markInfo.dictType = 1; // DICT_6x6 // 初始化Board配置(默认值) m_boardInfo.totalBoardNum = 10; // 默认1个board m_boardInfo.boardIdInterval = 8; // 间隔8 m_boardInfo.boardChaucoIDNum = 4; // 3x3有4个charuco // 创建相机重连定时器(5秒间隔) m_pCameraReconnectTimer = new QTimer(this); m_pCameraReconnectTimer->setInterval(5000); connect(m_pCameraReconnectTimer, &QTimer::timeout, this, &BinocularMarkPresenter::onCameraReconnectTimer); LOG_INFO("BinocularMarkPresenter created\n"); } BinocularMarkPresenter::~BinocularMarkPresenter() { // 停止重连定时器 if (m_pCameraReconnectTimer) { m_pCameraReconnectTimer->stop(); } stopDetection(); closeCameras(); // 释放图像数据 if (m_leftImageData.pData != nullptr) { delete[] m_leftImageData.pData; m_leftImageData.pData = nullptr; } if (m_rightImageData.pData != nullptr) { delete[] m_rightImageData.pData; m_rightImageData.pData = nullptr; } LOG_INFO("BinocularMarkPresenter destroyed\n"); } int BinocularMarkPresenter::initCameras() { LOG_INFO("Initializing cameras...\n"); // 尝试连接相机 int ret = tryConnectCameras(); if (ret == SUCCESS) { // 连接成功 m_bCameraConnected.store(true); emit cameraConnectionChanged(true); LOG_INFO("Cameras initialized successfully\n"); return SUCCESS; } // 连接失败,启动重连定时器 LOG_WARN("Failed to connect cameras, will retry every 5 seconds...\n"); m_pCameraReconnectTimer->start(); emit cameraConnectionChanged(false); return ERR_CODE(DEV_OPEN_ERR); } int BinocularMarkPresenter::tryConnectCameras() { // 如果正在检测,先停止,并标记需要自动重启 bool wasDetecting = m_bIsDetecting.load(); if (wasDetecting) { LOG_INFO("Stopping detection before camera reconnection...\n"); stopDetection(); m_bAutoStartDetection.store(true); // 标记重连成功后自动启动 } // 清理之前的相机对象 if (m_pLeftCamera != nullptr) { m_pLeftCamera->CloseDevice(); delete m_pLeftCamera; m_pLeftCamera = nullptr; } if (m_pRightCamera != nullptr) { m_pRightCamera->CloseDevice(); delete m_pRightCamera; m_pRightCamera = nullptr; } // 创建左相机对象 int ret = IGalaxyDevice::CreateObject(&m_pLeftCamera); if (ret != 0 || m_pLeftCamera == nullptr) { LOG_ERROR("Failed to create left camera object\n"); return ERR_CODE(APP_ERR_EXEC); } // 创建右相机对象 ret = IGalaxyDevice::CreateObject(&m_pRightCamera); if (ret != 0 || m_pRightCamera == nullptr) { LOG_ERROR("Failed to create right camera object\n"); delete m_pLeftCamera; m_pLeftCamera = nullptr; return ERR_CODE(APP_ERR_EXEC); } // 初始化Galaxy SDK ret = m_pLeftCamera->InitSDK(); if (ret != SUCCESS) { LOG_ERROR("Failed to initialize Galaxy SDK [%d]\n", ret); delete m_pLeftCamera; delete m_pRightCamera; m_pLeftCamera = nullptr; m_pRightCamera = nullptr; return ERR_CODE(APP_ERR_EXEC); } // 枚举设备 std::vector deviceList; ret = m_pLeftCamera->EnumerateDevices(deviceList, 2000); if (ret != 0 || deviceList.size() < 2) { LOG_WARN("Failed to enumerate cameras or less than 2 cameras found (found: %zu) [%d]\n", deviceList.size(), ret); delete m_pLeftCamera; delete m_pRightCamera; m_pLeftCamera = nullptr; m_pRightCamera = nullptr; return ERR_CODE(APP_ERR_EXEC); } LOG_INFO("Found %zu cameras:\n", deviceList.size()); for (size_t i = 0; i < deviceList.size(); i++) { LOG_INFO(" [%zu] SN: %s, Model: %s, Name: %s\n", i, deviceList[i].serialNumber.c_str(), deviceList[i].modelName.c_str(), deviceList[i].displayName.c_str()); } // 打开左相机(优先使用序列号) if (!m_strLeftCameraSerial.empty()) { LOG_INFO("Opening left camera by serial number: %s\n", m_strLeftCameraSerial.c_str()); ret = m_pLeftCamera->OpenDevice(m_strLeftCameraSerial); if (ret != SUCCESS) { LOG_ERROR("Failed to open left camera by serial number: %s\n", m_strLeftCameraSerial.c_str()); delete m_pLeftCamera; delete m_pRightCamera; m_pLeftCamera = nullptr; m_pRightCamera = nullptr; return ERR_CODE(APP_ERR_EXEC); } LOG_INFO("Left camera opened successfully (SN: %s)\n", m_strLeftCameraSerial.c_str()); } else { LOG_INFO("Opening left camera by index: %u\n", m_nLeftCameraIndex); ret = m_pLeftCamera->OpenDeviceByIndex(m_nLeftCameraIndex); if (ret != SUCCESS) { LOG_ERROR("Failed to open left camera (index: %u)\n", m_nLeftCameraIndex); delete m_pLeftCamera; delete m_pRightCamera; m_pLeftCamera = nullptr; m_pRightCamera = nullptr; return ERR_CODE(APP_ERR_EXEC); } LOG_INFO("Left camera opened successfully (index: %u)\n", m_nLeftCameraIndex); } // 初始化右相机SDK(每个设备对象需要独立初始化) ret = m_pRightCamera->InitSDK(); if (ret != SUCCESS) { LOG_ERROR("Failed to initialize Galaxy SDK for right camera\n"); m_pLeftCamera->CloseDevice(); delete m_pLeftCamera; delete m_pRightCamera; m_pLeftCamera = nullptr; m_pRightCamera = nullptr; return ERR_CODE(APP_ERR_EXEC); } // 打开右相机(优先使用序列号) if (!m_strRightCameraSerial.empty()) { LOG_INFO("Opening right camera by serial number: %s\n", m_strRightCameraSerial.c_str()); ret = m_pRightCamera->OpenDevice(m_strRightCameraSerial); if (ret != SUCCESS) { LOG_ERROR("Failed to open right camera by serial number: %s\n", m_strRightCameraSerial.c_str()); m_pLeftCamera->CloseDevice(); delete m_pLeftCamera; delete m_pRightCamera; m_pLeftCamera = nullptr; m_pRightCamera = nullptr; return ERR_CODE(APP_ERR_EXEC); } LOG_INFO("Right camera opened successfully (SN: %s)\n", m_strRightCameraSerial.c_str()); } else { LOG_INFO("Opening right camera by index: %u\n", m_nRightCameraIndex); ret = m_pRightCamera->OpenDeviceByIndex(m_nRightCameraIndex); if (ret != SUCCESS) { LOG_ERROR("Failed to open right camera (index: %u)\n", m_nRightCameraIndex); m_pLeftCamera->CloseDevice(); delete m_pLeftCamera; delete m_pRightCamera; m_pLeftCamera = nullptr; m_pRightCamera = nullptr; return ERR_CODE(APP_ERR_EXEC); } LOG_INFO("Right camera opened successfully (index: %u)\n", m_nRightCameraIndex); } // 设置左相机参数 m_pLeftCamera->SetExposureTime(m_fExposureTime); m_pLeftCamera->SetGain(m_fGain); m_pLeftCamera->SetTriggerMode(false); // 连续采集模式 // 设置右相机参数 m_pRightCamera->SetExposureTime(m_fExposureTime); m_pRightCamera->SetGain(m_fGain); m_pRightCamera->SetTriggerMode(false); // 连续采集模式 LOG_INFO("Cameras connected successfully\n"); return SUCCESS; } void BinocularMarkPresenter::closeCameras() { if (m_pLeftCamera != nullptr) { m_pLeftCamera->CloseDevice(); delete m_pLeftCamera; m_pLeftCamera = nullptr; LOG_INFO("Left camera closed\n"); } if (m_pRightCamera != nullptr) { m_pRightCamera->CloseDevice(); delete m_pRightCamera; m_pRightCamera = nullptr; LOG_INFO("Right camera closed\n"); } m_bCameraConnected.store(false); } void BinocularMarkPresenter::onCameraReconnectTimer() { // 尝试连接相机 int ret = tryConnectCameras(); if (ret == SUCCESS) { // 连接成功,停止定时器 m_pCameraReconnectTimer->stop(); m_bCameraConnected.store(true); emit cameraConnectionChanged(true); LOG_INFO("Camera reconnection successful!\n"); // 如果之前尝试启动检测但失败了,现在自动重新启动 if (m_bAutoStartDetection.load()) { LOG_INFO("Auto-starting detection after camera reconnection...\n"); m_bAutoStartDetection.store(false); // 重置标志 startDetection(); } } else { // 连接失败,定时器会继续运行 LOG_WARN("Camera reconnection failed, will retry in 5 seconds...\n"); } } int BinocularMarkPresenter::startDetection() { if (m_bIsDetecting.load()) { LOG_WARN("Detection already started\n"); return SUCCESS; } if (m_pLeftCamera == nullptr || m_pRightCamera == nullptr) { LOG_WARN("Cameras not initialized, will auto-start detection after camera reconnection\n"); // 设置标志,待相机重连成功后自动启动检测 m_bAutoStartDetection.store(true); return ERR_CODE(APP_ERR_EXEC); } // 1. 注册左相机图像回调(使用lambda捕获this指针) int ret = m_pLeftCamera->RegisterImageCallback( [this](const GalaxyImageData& imageData) { this->leftCameraCallback(imageData); } ); if (ret != SUCCESS) { LOG_ERROR("Failed to register left camera callback\n"); return ERR_CODE(APP_ERR_EXEC); } // 2. 注册右相机图像回调 ret = m_pRightCamera->RegisterImageCallback( [this](const GalaxyImageData& imageData) { this->rightCameraCallback(imageData); } ); if (ret != SUCCESS) { LOG_ERROR("Failed to register right camera callback\n"); m_pLeftCamera->UnregisterImageCallback(); return ERR_CODE(APP_ERR_EXEC); } // 3. 启动左相机采集 ret = m_pLeftCamera->StartAcquisition(); if (ret != SUCCESS) { LOG_ERROR("Failed to start left camera acquisition\n"); m_pLeftCamera->UnregisterImageCallback(); m_pRightCamera->UnregisterImageCallback(); return ERR_CODE(APP_ERR_EXEC); } // 4. 启动右相机采集 ret = m_pRightCamera->StartAcquisition(); if (ret != SUCCESS) { LOG_ERROR("Failed to start right camera acquisition\n"); m_pLeftCamera->StopAcquisition(); m_pLeftCamera->UnregisterImageCallback(); m_pRightCamera->UnregisterImageCallback(); return ERR_CODE(APP_ERR_EXEC); } m_bIsDetecting.store(true); m_bAutoStartDetection.store(false); // 清除自动启动标志 // 5. 启动采集线程 m_captureThread = std::thread(&BinocularMarkPresenter::captureThreadFunc, this); LOG_INFO("Detection started (callbacks registered and cameras acquisition started)\n"); return SUCCESS; } void BinocularMarkPresenter::stopDetection() { if (!m_bIsDetecting.load()) return; m_bIsDetecting.store(false); m_bAutoStartDetection.store(false); // 清除自动启动标志(用户主动停止) // 1. 等待线程结束 if (m_captureThread.joinable()) { m_captureThread.join(); } // 2. 停止相机采集 if (m_pLeftCamera != nullptr) { m_pLeftCamera->StopAcquisition(); } if (m_pRightCamera != nullptr) { m_pRightCamera->StopAcquisition(); } // 3. 取消注册图像回调 if (m_pLeftCamera != nullptr) { m_pLeftCamera->UnregisterImageCallback(); } if (m_pRightCamera != nullptr) { m_pRightCamera->UnregisterImageCallback(); } LOG_INFO("Detection stopped (cameras acquisition stopped and callbacks unregistered)\n"); } void BinocularMarkPresenter::leftCameraCallback(const GalaxyImageData& imageData) { QMutexLocker locker(&m_imageMutex); // 检查是否需要重新分配内存 if (m_leftImageData.dataSize != imageData.dataSize) { // 大小不同,需要重新分配 if (m_leftImageData.pData != nullptr) { delete[] m_leftImageData.pData; m_leftImageData.pData = nullptr; } if (imageData.dataSize > 0) { m_leftImageData.pData = new unsigned char[imageData.dataSize]; } } // 复制图像数据 m_leftImageData.width = imageData.width; m_leftImageData.height = imageData.height; m_leftImageData.dataSize = imageData.dataSize; m_leftImageData.pixelFormat = imageData.pixelFormat; m_leftImageData.frameID = imageData.frameID; m_leftImageData.timestamp = imageData.timestamp; if (m_leftImageData.pData != nullptr && imageData.pData != nullptr) { memcpy(m_leftImageData.pData, imageData.pData, imageData.dataSize); } m_bLeftImageReady = true; } void BinocularMarkPresenter::rightCameraCallback(const GalaxyImageData& imageData) { QMutexLocker locker(&m_imageMutex); // 检查是否需要重新分配内存 if (m_rightImageData.dataSize != imageData.dataSize) { // 大小不同,需要重新分配 if (m_rightImageData.pData != nullptr) { delete[] m_rightImageData.pData; m_rightImageData.pData = nullptr; } if (imageData.dataSize > 0) { m_rightImageData.pData = new unsigned char[imageData.dataSize]; } } // 复制图像数据 m_rightImageData.width = imageData.width; m_rightImageData.height = imageData.height; m_rightImageData.dataSize = imageData.dataSize; m_rightImageData.pixelFormat = imageData.pixelFormat; m_rightImageData.frameID = imageData.frameID; m_rightImageData.timestamp = imageData.timestamp; if (m_rightImageData.pData != nullptr && imageData.pData != nullptr) { memcpy(m_rightImageData.pData, imageData.pData, imageData.dataSize); } m_bRightImageReady = true; } void BinocularMarkPresenter::captureThreadFunc() { LOG_INFO("Capture thread started\n"); while (m_bIsDetecting.load()) { // 等待双目图像都准备好 bool leftReady = false; bool rightReady = false; { QMutexLocker locker(&m_imageMutex); leftReady = m_bLeftImageReady; rightReady = m_bRightImageReady; } if (leftReady && rightReady) { // 帧率限制:限制为5fps(200ms间隔) auto now = std::chrono::steady_clock::now(); auto elapsed = std::chrono::duration_cast(now - m_lastImageSendTime).count(); if (elapsed < 200) { LOG_DEBUG("Drop frame: frame rate limit (elapsed: %lld ms)\n", elapsed); // 清除就绪标志 { QMutexLocker locker(&m_imageMutex); m_bLeftImageReady = false; m_bRightImageReady = false; } continue; } // 更新发送时间 m_lastImageSendTime = now; // 处理图像并进行检测 processImages(); // 清除就绪标志 { QMutexLocker locker(&m_imageMutex); m_bLeftImageReady = false; m_bRightImageReady = false; } } else { // 等待图像 std::this_thread::sleep_for(std::chrono::milliseconds(2)); } } LOG_INFO("Capture thread stopped\n"); } void BinocularMarkPresenter::processImages() { // 在互斥锁中转换为 cv::Mat(深拷贝) cv::Mat leftImage, rightImage; { QMutexLocker locker(&m_imageMutex); leftImage = imageDataToMat(m_leftImageData); rightImage = imageDataToMat(m_rightImageData); } // 如果启用持续图像流,直接发送图像,不做检测 if (m_bSendContinuousImage) { emit detectionResult(std::vector(), leftImage, rightImage, 0); LOG_DEBUG("Sent continuous image stream\n"); return; } // 调用算法进行检测 std::vector marks; // 使用新的算法接口 wd_BQ_getCharuco3DMark( leftImage, rightImage, m_cameraMatrixL, m_distCoeffsL, m_cameraMatrixR, m_distCoeffsR, m_R1, m_R2, m_P1, m_P2, m_Q, m_markInfo, m_boardInfo, m_disparityOffset, marks ); // 只有在启用持续发送时才发送检测结果 if (m_bSendContinuousResult) { int errCode = marks.empty() ? -1 : 0; emit detectionResult(marks, leftImage, rightImage, errCode); LOG_DEBUG("Sent continuous detection result, detected %zu marks\n", marks.size()); } } cv::Mat BinocularMarkPresenter::imageDataToMat(const GalaxyImageData& imageData) { // 根据像素格式创建cv::Mat // 假设是单通道8位灰度图或3通道BGR图像 int cvType = CV_8UC1; // 默认灰度图 // 这里需要根据imageData.pixelFormat判断类型 // 简化处理:假设是灰度图 if (imageData.dataSize == imageData.width * imageData.height) { cvType = CV_8UC1; // 灰度图 } else if (imageData.dataSize == imageData.width * imageData.height * 3) { cvType = CV_8UC3; // BGR图 } cv::Mat image(imageData.height, imageData.width, cvType); // 复制数据 memcpy(image.data, imageData.pData, imageData.dataSize); return image; } bool BinocularMarkPresenter::loadConfiguration(const QString& configFilePath) { m_calibrationFilePath = PathManager::GetInstance().GetAppConfigDirectory() + "/StereoCamera.xml"; QFile file(configFilePath); if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) { LOG_WARN("Failed to open config file: %s\n", configFilePath.toStdString().c_str()); // 即使配置文件不存在,也要加载标定文件 if (!loadCalibration(m_calibrationFilePath)) { LOG_WARN("Failed to load calibration file: %s (will use default values)\n", m_calibrationFilePath.toStdString().c_str()); } return false; } QDomDocument doc; QString errorMsg; int errorLine, errorColumn; if (!doc.setContent(&file, &errorMsg, &errorLine, &errorColumn)) { LOG_ERROR("Failed to parse config XML: %s (Line: %d, Column: %d)\n", errorMsg.toStdString().c_str(), errorLine, errorColumn); file.close(); return false; } file.close(); QDomElement root = doc.documentElement(); if (root.tagName() != "BinocularMarkConfig") { LOG_ERROR("Invalid config file: root element should be 'BinocularMarkConfig'\n"); return false; } LOG_DEBUG("========== 开始读取配置文件: %s ==========\n", configFilePath.toStdString().c_str()); // 读取TCP服务器配置 QDomElement serverConfig = root.firstChildElement("ServerConfig"); if (!serverConfig.isNull()) { m_nServerPort = static_cast(serverConfig.attribute("port", "5901").toInt()); LOG_DEBUG(" [ServerConfig] port = %d\n", m_nServerPort); } else { LOG_DEBUG(" [ServerConfig] 未找到,使用默认端口: %d\n", m_nServerPort); } // 读取相机配置 QDomElement cameras = root.firstChildElement("Cameras"); if (!cameras.isNull()) { LOG_DEBUG(" [Cameras] 开始读取相机配置...\n"); QDomElement camera = cameras.firstChildElement("Camera"); while (!camera.isNull()) { int index = camera.attribute("index", "0").toInt(); QString serialNumber = camera.attribute("serialNumber", ""); double exposureTime = camera.attribute("exposureTime", "10000.0").toDouble(); double gain = camera.attribute("gain", "1.0").toDouble(); LOG_DEBUG(" Camera[%d]: serialNumber=%s, exposureTime=%.2f, gain=%.2f\n", index, serialNumber.toStdString().c_str(), exposureTime, gain); if (index == 0) { m_nLeftCameraIndex = 0; m_strLeftCameraSerial = serialNumber.toStdString(); m_fExposureTime = exposureTime; m_fGain = gain; LOG_DEBUG(" -> 设置为左相机"); if (!m_strLeftCameraSerial.empty()) { LOG_DEBUG(" (序列号: %s)", m_strLeftCameraSerial.c_str()); } LOG_DEBUG("\n"); } else if (index == 1) { m_nRightCameraIndex = 1; m_strRightCameraSerial = serialNumber.toStdString(); LOG_DEBUG(" -> 设置为右相机"); if (!m_strRightCameraSerial.empty()) { LOG_DEBUG(" (序列号: %s)", m_strRightCameraSerial.c_str()); } LOG_DEBUG("\n"); // 假设左右相机使用相同的曝光和增益 } camera = camera.nextSiblingElement("Camera"); } } else { LOG_DEBUG(" [Cameras] 未找到,使用默认相机配置\n"); } // 读取Mark板参数 QDomElement markInfo = root.firstChildElement("MarkInfo"); if (!markInfo.isNull()) { m_markInfo.patternSize.width = markInfo.attribute("patternWidth", "3").toInt(); m_markInfo.patternSize.height = markInfo.attribute("patternHeight", "3").toInt(); m_markInfo.checkerSize = markInfo.attribute("checkerSize", "60.0").toFloat(); m_markInfo.markerSize = markInfo.attribute("markerSize", "45.0").toFloat(); m_markInfo.dictType = markInfo.attribute("dictType", "1").toInt(); LOG_DEBUG(" [MarkInfo]\n"); LOG_DEBUG(" patternSize: %d x %d\n", m_markInfo.patternSize.width, m_markInfo.patternSize.height); LOG_DEBUG(" checkerSize: %.2f mm\n", m_markInfo.checkerSize); LOG_DEBUG(" markerSize: %.2f mm\n", m_markInfo.markerSize); LOG_DEBUG(" dictType: %d\n", m_markInfo.dictType); } else { LOG_DEBUG(" [MarkInfo] 未找到,使用默认值\n"); } // 读取Board配置 QDomElement boardInfo = root.firstChildElement("BoardInfo"); if (!boardInfo.isNull()) { m_boardInfo.totalBoardNum = boardInfo.attribute("totalBoardNum", "1").toInt(); m_boardInfo.boardIdInterval = boardInfo.attribute("boardIdInterval", "8").toInt(); m_boardInfo.boardChaucoIDNum = boardInfo.attribute("boardCharucoIdNum", "4").toInt(); LOG_DEBUG(" [BoardInfo]\n"); LOG_DEBUG(" totalBoardNum: %d\n", m_boardInfo.totalBoardNum); LOG_DEBUG(" boardIdInterval: %d\n", m_boardInfo.boardIdInterval); LOG_DEBUG(" boardCharucoIdNum: %d\n", m_boardInfo.boardChaucoIDNum); } else { LOG_DEBUG(" [BoardInfo] 未找到,使用默认值\n"); } // 读取算法参数 QDomElement algoParams = root.firstChildElement("AlgorithmParams"); if (!algoParams.isNull()) { m_disparityOffset = algoParams.attribute("disparityOffset", "0.0").toDouble(); LOG_DEBUG(" [AlgorithmParams]\n"); LOG_DEBUG(" disparityOffset: %.6f\n", m_disparityOffset); } else { LOG_DEBUG(" [AlgorithmParams] 未找到,使用默认值\n"); } // 读取标定文件路径(可选,如果没有则使用默认路径) QDomElement calibFileElem = root.firstChildElement("CalibrationFile"); QString calibFile; if (!calibFileElem.isNull()) { calibFile = calibFileElem.attribute("path", ""); LOG_DEBUG(" [CalibrationFile] path (原始) = %s\n", calibFile.toStdString().c_str()); } // 如果没有配置或配置为空,使用默认路径 if (calibFile.isEmpty()) { QFileInfo configFileInfo(configFilePath); calibFile = configFileInfo.dir().absoluteFilePath("StereoCamera.xml"); LOG_DEBUG(" [CalibrationFile] 使用默认路径: %s\n", calibFile.toStdString().c_str()); } // 如果是相对路径,则相对于配置文件所在目录 else if (QFileInfo(calibFile).isRelative()) { QFileInfo configFileInfo(configFilePath); calibFile = configFileInfo.dir().absoluteFilePath(calibFile); LOG_DEBUG(" [CalibrationFile] path (绝对路径) = %s\n", calibFile.toStdString().c_str()); } LOG_DEBUG("========== 配置文件读取完成 ==========\n\n"); // 保存标定文件路径 m_calibrationFilePath = calibFile; // 加载标定文件(如果文件不存在,只警告不返回失败) if (!loadCalibration(calibFile)) { LOG_WARN("Failed to load calibration file: %s (will use default values)\n", calibFile.toStdString().c_str()); } LOG_INFO("Configuration loaded from %s\n", configFilePath.toStdString().c_str()); LOG_INFO(" Server port: %d\n", m_nServerPort); LOG_INFO(" Camera indices: L=%d, R=%d\n", m_nLeftCameraIndex, m_nRightCameraIndex); LOG_INFO(" Mark pattern: %dx%d, checker=%fmm, marker=%fmm\n", m_markInfo.patternSize.width, m_markInfo.patternSize.height, m_markInfo.checkerSize, m_markInfo.markerSize); LOG_INFO(" Board info: total=%d, interval=%d, charucoNum=%d\n", m_boardInfo.totalBoardNum, m_boardInfo.boardIdInterval, m_boardInfo.boardChaucoIDNum); LOG_INFO(" Disparity offset: %f\n", m_disparityOffset); return true; } bool BinocularMarkPresenter::loadCalibration(const QString& calibFilePath) { LOG_DEBUG("========== 开始加载标定文件: %s ==========\n", calibFilePath.toStdString().c_str()); // 使用OpenCV的FileStorage读取XML文件 cv::FileStorage fs(calibFilePath.toStdString(), cv::FileStorage::READ); if (!fs.isOpened()) { LOG_ERROR("Failed to open calibration file: %s\n", calibFilePath.toStdString().c_str()); return false; } // 读取左相机参数 cv::FileNode leftCameraNode = fs["LeftCamera"]; if (!leftCameraNode.empty()) { LOG_DEBUG(" [LeftCamera]\n"); leftCameraNode["CameraMatrix"] >> m_cameraMatrixL; leftCameraNode["DistCoeffs"] >> m_distCoeffsL; if (!m_cameraMatrixL.empty()) { LOG_DEBUG(" CameraMatrix (3x3):\n"); LOG_DEBUG(" [%.6f, %.6f, %.6f]\n", m_cameraMatrixL.at(0, 0), m_cameraMatrixL.at(0, 1), m_cameraMatrixL.at(0, 2)); LOG_DEBUG(" [%.6f, %.6f, %.6f]\n", m_cameraMatrixL.at(1, 0), m_cameraMatrixL.at(1, 1), m_cameraMatrixL.at(1, 2)); LOG_DEBUG(" [%.6f, %.6f, %.6f]\n", m_cameraMatrixL.at(2, 0), m_cameraMatrixL.at(2, 1), m_cameraMatrixL.at(2, 2)); } if (!m_distCoeffsL.empty()) { std::stringstream ss; ss << " DistCoeffs (" << m_distCoeffsL.cols << "个系数): ["; for (int i = 0; i < m_distCoeffsL.cols; i++) { ss << std::fixed << std::setprecision(6) << m_distCoeffsL.at(0, i); if (i < m_distCoeffsL.cols - 1) ss << ", "; } ss << "]"; LOG_DEBUG("%s\n", ss.str().c_str()); } } else { LOG_ERROR(" [LeftCamera] 节点未找到!\n"); } // 读取右相机参数 cv::FileNode rightCameraNode = fs["RightCamera"]; if (!rightCameraNode.empty()) { LOG_DEBUG(" [RightCamera] \n"); rightCameraNode["CameraMatrix"] >> m_cameraMatrixR; rightCameraNode["DistCoeffs"] >> m_distCoeffsR; if (!m_cameraMatrixR.empty()) { LOG_DEBUG(" CameraMatrix (3x3):\n"); LOG_DEBUG(" [%.6f, %.6f, %.6f]\n", m_cameraMatrixR.at(0, 0), m_cameraMatrixR.at(0, 1), m_cameraMatrixR.at(0, 2)); LOG_DEBUG(" [%.6f, %.6f, %.6f]\n", m_cameraMatrixR.at(1, 0), m_cameraMatrixR.at(1, 1), m_cameraMatrixR.at(1, 2)); LOG_DEBUG(" [%.6f, %.6f, %.6f]\n", m_cameraMatrixR.at(1, 0), m_cameraMatrixR.at(1, 1), m_cameraMatrixR.at(1, 2)); LOG_DEBUG(" [%.6f, %.6f, %.6f]\n", m_cameraMatrixR.at(2, 0), m_cameraMatrixR.at(2, 1), m_cameraMatrixR.at(2, 2)); } if (!m_distCoeffsR.empty()) { std::stringstream ss; ss << " DistCoeffs (" << m_distCoeffsR.cols << "个系数): ["; for (int i = 0; i < m_distCoeffsR.cols; i++) { ss << std::fixed << std::setprecision(6) << m_distCoeffsR.at(0, i); if (i < m_distCoeffsR.cols - 1) ss << ", "; } ss << "]"; LOG_DEBUG("%s\n", ss.str().c_str()); } } else { LOG_ERROR(" [RightCamera] 节点未找到!\n"); } // 读取双目标定参数(用于参考,算法主要使用校正后的参数) cv::FileNode stereoNode = fs["Stereo"]; if (!stereoNode.empty()) { double baseline = stereoNode["Baseline"]; LOG_DEBUG(" [Stereo] baseline = %.6f mm\n", baseline); } else { LOG_DEBUG(" [Stereo] 节点未找到\n"); } // 读取校正参数(Rectification) cv::FileNode rectNode = fs["Rectification"]; if (!rectNode.empty()) { LOG_DEBUG(" [Rectification] 节点找到\n"); rectNode["R1"] >> m_R1; rectNode["R2"] >> m_R2; rectNode["P1"] >> m_P1; rectNode["P2"] >> m_P2; rectNode["Q"] >> m_Q; if (!m_R1.empty()) { LOG_DEBUG(" R1 (3x3):\n"); std::stringstream ss; for (int i = 0; i < 3; i++) { ss.str(""); ss << " ["; ss << std::fixed << std::setprecision(6) << m_R1.at(i, 0) << ", "; ss << m_R1.at(i, 1) << ", "; ss << m_R1.at(i, 2) << "]"; LOG_DEBUG("%s\n", ss.str().c_str()); } } if (!m_R2.empty()) { LOG_DEBUG(" R2 (3x3):\n"); std::stringstream ss; for (int i = 0; i < 3; i++) { ss.str(""); ss << " ["; ss << std::fixed << std::setprecision(6) << m_R2.at(i, 0) << ", "; ss << m_R2.at(i, 1) << ", "; ss << m_R2.at(i, 2) << "]"; LOG_DEBUG("%s\n", ss.str().c_str()); } } if (!m_P1.empty()) { LOG_DEBUG(" P1 (3x4):\n"); std::stringstream ss; for (int i = 0; i < 3; i++) { ss.str(""); ss << " ["; ss << std::fixed << std::setprecision(6) << m_P1.at(i, 0) << ", "; ss << m_P1.at(i, 1) << ", "; ss << m_P1.at(i, 2) << ", "; ss << m_P1.at(i, 3) << "]"; LOG_DEBUG("%s\n", ss.str().c_str()); } } if (!m_P2.empty()) { LOG_DEBUG(" P2 (3x4):\n"); std::stringstream ss; for (int i = 0; i < 3; i++) { ss.str(""); ss << " ["; ss << std::fixed << std::setprecision(6) << m_P2.at(i, 0) << ", "; ss << m_P2.at(i, 1) << ", "; ss << m_P2.at(i, 2) << ", "; ss << m_P2.at(i, 3) << "]"; LOG_DEBUG("%s\n", ss.str().c_str()); } } if (!m_Q.empty()) { LOG_DEBUG(" Q (4x4):\n"); std::stringstream ss; for (int i = 0; i < 4; i++) { ss.str(""); ss << " ["; ss << std::fixed << std::setprecision(6) << m_Q.at(i, 0) << ", "; ss << m_Q.at(i, 1) << ", "; ss << m_Q.at(i, 2) << ", "; ss << m_Q.at(i, 3) << "]"; LOG_DEBUG("%s\n", ss.str().c_str()); } } } else { LOG_ERROR(" [Rectification] 节点未找到!\n"); } fs.release(); // 验证关键参数是否加载成功 bool success = !m_cameraMatrixL.empty() && !m_cameraMatrixR.empty() && !m_distCoeffsL.empty() && !m_distCoeffsR.empty() && !m_R1.empty() && !m_R2.empty() && !m_P1.empty() && !m_P2.empty() && !m_Q.empty(); if (success) { LOG_DEBUG("========== 标定文件加载成功 ==========\n\n"); LOG_INFO("Calibration loaded successfully from %s\n", calibFilePath.toStdString().c_str()); } else { LOG_ERROR("========== 标定文件加载失败或不完整 ==========\n\n"); LOG_ERROR("Calibration file incomplete or invalid\n"); } return success; } void BinocularMarkPresenter::handleSingleDetection(const TCPClient* pClient) { LOG_INFO("Handle single detection request\n"); if (!m_pLeftCamera || !m_pRightCamera) { LOG_WARN("Cameras not initialized\n"); emit singleDetectionResult(pClient, std::vector(), cv::Mat(), cv::Mat(), -1); return; } GalaxyImageData leftImageData, rightImageData; memset(&leftImageData, 0, sizeof(GalaxyImageData)); memset(&rightImageData, 0, sizeof(GalaxyImageData)); // 顺序取图 int ret1 = m_pLeftCamera->CaptureImage(leftImageData, 5000); int ret2 = m_pRightCamera->CaptureImage(rightImageData, 5000); if (ret1 != 0 || ret2 != 0) { LOG_WARN("Failed to capture images: left=%d, right=%d\n", ret1, ret2); if (leftImageData.pData) delete[] leftImageData.pData; if (rightImageData.pData) delete[] rightImageData.pData; emit singleDetectionResult(pClient, std::vector(), cv::Mat(), cv::Mat(), -1); return; } // 并行转换图像 auto leftFuture = std::async(std::launch::async, [&]() { return imageDataToMat(leftImageData); }); auto rightFuture = std::async(std::launch::async, [&]() { return imageDataToMat(rightImageData); }); cv::Mat leftImage = leftFuture.get(); cv::Mat rightImage = rightFuture.get(); LOG_DEBUG("Image captured: left=%dx%d, right=%dx%d\n", leftImage.cols, leftImage.rows, rightImage.cols, rightImage.rows); delete[] leftImageData.pData; delete[] rightImageData.pData; std::vector marks; wd_BQ_getCharuco3DMark(leftImage, rightImage, m_cameraMatrixL, m_distCoeffsL, m_cameraMatrixR, m_distCoeffsR, m_R1, m_R2, m_P1, m_P2, m_Q, m_markInfo, m_boardInfo, m_disparityOffset, marks); int errCode = marks.empty() ? ERR_CODE(DEV_RESULT_EMPTY) : 0; emit singleDetectionResult(pClient, marks, leftImage, rightImage, errCode); LOG_INFO("Single detection completed, detected %zu marks\n", marks.size()); } void BinocularMarkPresenter::handleSingleImage(const TCPClient* pClient) { LOG_INFO("Handle single image request\n"); if (!m_pLeftCamera || !m_pRightCamera) { LOG_WARN("Cameras not initialized\n"); emit singleImageResult(pClient, cv::Mat(), cv::Mat()); return; } CVrTimeUtils oTimeUtils; GalaxyImageData leftImageData, rightImageData; memset(&leftImageData, 0, sizeof(GalaxyImageData)); memset(&rightImageData, 0, sizeof(GalaxyImageData)); // 顺序取图 int ret1 = m_pLeftCamera->CaptureImage(leftImageData, 5000); int ret2 = m_pRightCamera->CaptureImage(rightImageData, 5000); if (ret1 != 0 || ret2 != 0) { LOG_WARN("Failed to capture images: left=%d, right=%d\n", ret1, ret2); if (leftImageData.pData) delete[] leftImageData.pData; if (rightImageData.pData) delete[] rightImageData.pData; emit singleImageResult(pClient, cv::Mat(), cv::Mat()); return; } // 并行转换图像 auto leftFuture = std::async(std::launch::async, [&]() { return imageDataToMat(leftImageData); }); auto rightFuture = std::async(std::launch::async, [&]() { return imageDataToMat(rightImageData); }); cv::Mat leftImage = leftFuture.get(); cv::Mat rightImage = rightFuture.get(); delete[] leftImageData.pData; delete[] rightImageData.pData; double timeCost = oTimeUtils.GetElapsedTimeInMilliSec(); emit singleImageResult(pClient, leftImage, rightImage); LOG_DEBUG("Image captured: left=%dx%d, right=%dx%d, getimage:%.3f ms time:%.3f ms\n", leftImage.cols, leftImage.rows, rightImage.cols, rightImage.rows, timeCost, oTimeUtils.GetElapsedTimeInMilliSec()); } void BinocularMarkPresenter::handleStartWork() { LOG_INFO("Handle start work request\n"); if (!m_bIsDetecting) { int ret = startDetection(); if (ret != 0) { LOG_ERROR("Failed to start detection, error code: %d\n", ret); return; } LOG_INFO("Detection thread started\n"); } m_bSendContinuousResult = true; LOG_INFO("Continuous result sending enabled\n"); } void BinocularMarkPresenter::handleStopWork() { LOG_INFO("Handle stop work request\n"); m_bSendContinuousResult = false; LOG_INFO("Continuous result sending disabled\n"); if (m_bIsDetecting) { stopDetection(); LOG_INFO("Detection thread stopped\n"); } } void BinocularMarkPresenter::handleStartContinuousImage() { LOG_INFO("Handle start continuous image request\n"); if (!m_bIsDetecting) { int ret = startDetection(); if (ret != 0) { LOG_ERROR("Failed to start detection, error code: %d\n", ret); return; } LOG_INFO("Detection thread started\n"); } m_bSendContinuousImage = true; LOG_INFO("Continuous image sending enabled\n"); } void BinocularMarkPresenter::handleStopContinuousImage() { LOG_INFO("Handle stop continuous image request\n"); m_bSendContinuousImage = false; LOG_INFO("Continuous image sending disabled\n"); // 停止持续图像流时,无条件停止检测线程 if (m_bIsDetecting) { stopDetection(); LOG_INFO("Detection thread stopped\n"); } } void BinocularMarkPresenter::handleSetExposureTime(double exposureTime) { LOG_INFO("Handle set exposure time: %.2f\n", exposureTime); m_fExposureTime = exposureTime; if (m_pLeftCamera) { m_pLeftCamera->SetExposureTime(m_fExposureTime); } if (m_pRightCamera) { m_pRightCamera->SetExposureTime(m_fExposureTime); } LOG_INFO("Exposure time updated successfully\n"); } void BinocularMarkPresenter::handleSetGain(double gain) { LOG_INFO("Handle set gain: %.2f\n", gain); m_fGain = gain; if (m_pLeftCamera) { m_pLeftCamera->SetGain(m_fGain); } if (m_pRightCamera) { m_pRightCamera->SetGain(m_fGain); } LOG_INFO("Gain updated successfully\n"); } void BinocularMarkPresenter::handleSetLeftExposureTime(double exposureTime) { LOG_INFO("Handle set left camera exposure time: %.2f\n", exposureTime); if (m_pLeftCamera) { m_pLeftCamera->SetExposureTime(exposureTime); LOG_INFO("Left camera exposure time updated successfully\n"); } else { LOG_WARN("Left camera not initialized\n"); } } void BinocularMarkPresenter::handleSetRightExposureTime(double exposureTime) { LOG_INFO("Handle set right camera exposure time: %.2f\n", exposureTime); if (m_pRightCamera) { m_pRightCamera->SetExposureTime(exposureTime); LOG_INFO("Right camera exposure time updated successfully\n"); } else { LOG_WARN("Right camera not initialized\n"); } } void BinocularMarkPresenter::handleSetLeftGain(double gain) { LOG_INFO("Handle set left camera gain: %.2f\n", gain); if (m_pLeftCamera) { m_pLeftCamera->SetGain(gain); LOG_INFO("Left camera gain updated successfully\n"); } else { LOG_WARN("Left camera not initialized\n"); } } void BinocularMarkPresenter::handleSetRightGain(double gain) { LOG_INFO("Handle set right camera gain: %.2f\n", gain); if (m_pRightCamera) { m_pRightCamera->SetGain(gain); LOG_INFO("Right camera gain updated successfully\n"); } else { LOG_WARN("Right camera not initialized\n"); } } void BinocularMarkPresenter::handleGetCameraInfo(const TCPClient* pClient, const QString& camera) { LOG_INFO("Handle get camera info: %s\n", camera.toStdString().c_str()); IGalaxyDevice* pCamera = nullptr; if (camera == "left") { pCamera = m_pLeftCamera; } else if (camera == "right") { pCamera = m_pRightCamera; } if (!pCamera) { LOG_WARN("Camera not initialized: %s\n", camera.toStdString().c_str()); emit cameraInfoResult(pClient, camera, "", "", "", 0.0, 0.0); return; } // 获取相机信息 GalaxyDeviceInfo deviceInfo; int ret = pCamera->GetDeviceInfo(deviceInfo); if (ret != 0) { LOG_WARN("Failed to get camera info: %s\n", camera.toStdString().c_str()); emit cameraInfoResult(pClient, camera, "", "", "", 0.0, 0.0); return; } // 获取当前曝光时间和增益 double exposureTime = 0.0; double gain = 0.0; pCamera->GetExposureTime(exposureTime); pCamera->GetGain(gain); // 发送相机信息 emit cameraInfoResult(pClient, camera, QString::fromStdString(deviceInfo.serialNumber), QString::fromStdString(deviceInfo.modelName), QString::fromStdString(deviceInfo.displayName), exposureTime, gain); LOG_INFO("Camera info sent: %s, SN=%s, Model=%s, Exposure=%.2f, Gain=%.2f\n", camera.toStdString().c_str(), deviceInfo.serialNumber.c_str(), deviceInfo.modelName.c_str(), exposureTime, gain); } void BinocularMarkPresenter::handleGetCalibration(const TCPClient* pClient) { LOG_INFO("Handle get calibration request\n"); if (m_calibrationFilePath.isEmpty()) { LOG_WARN("Calibration file path not set\n"); emit calibrationMatrixResult(pClient, ""); return; } QFile file(m_calibrationFilePath); if (!file.open(QIODevice::ReadOnly | QIODevice::Text)) { LOG_WARN("Failed to open calibration file: %s\n", m_calibrationFilePath.toStdString().c_str()); emit calibrationMatrixResult(pClient, ""); return; } QTextStream in(&file); QString calibrationXml = in.readAll(); file.close(); emit calibrationMatrixResult(pClient, calibrationXml); LOG_INFO("Calibration matrix sent from: %s\n", m_calibrationFilePath.toStdString().c_str()); } void BinocularMarkPresenter::handleSetCalibration(const TCPClient* pClient, const QString& calibrationXml) { LOG_INFO("Handle set calibration request : %s\n", m_calibrationFilePath.toStdString().c_str()); if (m_calibrationFilePath.isEmpty()) { LOG_ERROR("Calibration file path not set\n"); return; } QFile file(m_calibrationFilePath); if (!file.open(QIODevice::WriteOnly | QIODevice::Text)) { LOG_ERROR("Failed to open calibration file for writing: %s\n", m_calibrationFilePath.toStdString().c_str()); return; } QTextStream out(&file); out << calibrationXml; file.close(); LOG_INFO("Calibration matrix saved to: %s\n", m_calibrationFilePath.toStdString().c_str()); // 重新加载标定文件 if (loadCalibration(m_calibrationFilePath)) { LOG_INFO("Calibration reloaded successfully\n"); } else { LOG_ERROR("Failed to reload calibration\n"); } }