GrabBag/App/BinocularMark/BinocularMarkApp/BinocularMarkTcpProtocol.cpp
#include "BinocularMarkTcpProtocol.h"
#include "VrLog.h"
#include <QDateTime>
#include <QBuffer>
#include <QFile>
#include <QTextStream>
#include <cstring>
#include <future>
// Static instance pointer
BinocularMarkTcpProtocol* BinocularMarkTcpProtocol::s_pInstance = nullptr;
BinocularMarkTcpProtocol::BinocularMarkTcpProtocol(QObject *parent)
: QObject(parent)
, m_pTcpServer(nullptr)
, m_pHeartbeatTimer(nullptr)
, m_nHeartbeatInterval(30)
, m_nTcpPort(5901)
, m_bIsProcessingFrame(false)
{
s_pInstance = this;
// Create the TCP server
VrCreatYTCPServer(&m_pTcpServer);
// Create the heartbeat timer
m_pHeartbeatTimer = new QTimer(this);
connect(m_pHeartbeatTimer, &QTimer::timeout, this, &BinocularMarkTcpProtocol::onHeartbeatTimeout);
LOG_INFO("BinocularMarkTcpProtocol created\n");
}
BinocularMarkTcpProtocol::~BinocularMarkTcpProtocol()
{
stopHeartbeat();
stopServer();
if (m_pTcpServer != nullptr)
{
delete m_pTcpServer;
m_pTcpServer = nullptr;
}
s_pInstance = nullptr;
LOG_INFO("BinocularMarkTcpProtocol destroyed\n");
}
bool BinocularMarkTcpProtocol::startServer(quint16 port)
{
if (m_pTcpServer == nullptr)
{
LOG_ERROR("TCP server not created\n");
return false;
}
m_nTcpPort = port;
// Initialize the TCP server
if (!m_pTcpServer->Init(port, false))
{
LOG_ERROR("Failed to init TCP server on port %d\n", port);
return false;
}
// Set the event callback
m_pTcpServer->SetEventCallback(tcpEventCallback);
// Start the TCP server
if (!m_pTcpServer->Start(tcpRecvCallback, false))
{
LOG_ERROR("Failed to start TCP server\n");
return false;
}
LOG_INFO("TCP server started on port %d\n", port);
return true;
}
void BinocularMarkTcpProtocol::stopServer()
{
if (m_pTcpServer != nullptr)
{
m_pTcpServer->Stop();
m_pTcpServer->Close();
LOG_INFO("TCP server stopped\n");
}
// Clear the per-client receive buffers
m_clientBuffers.clear();
}
void BinocularMarkTcpProtocol::startHeartbeat(int heartbeatInterval)
{
m_nHeartbeatInterval = heartbeatInterval;
m_pHeartbeatTimer->start(heartbeatInterval * 1000);
LOG_INFO("Heartbeat started, interval: %d seconds\n", heartbeatInterval);
}
void BinocularMarkTcpProtocol::stopHeartbeat()
{
if (m_pHeartbeatTimer != nullptr)
{
m_pHeartbeatTimer->stop();
}
LOG_INFO("Heartbeat stopped\n");
}
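// Heartbeats flow in both directions: the timer below broadcasts a
// {"msg_type":"heartbeat","timestamp":...} frame to every connected client,
// while heartbeats received from clients are answered with "heartbeat_ack"
// (see handleHeartbeat / sendHeartbeatAck). Incoming "heartbeat_ack" messages
// are intentionally ignored in handleJsonMessage.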
void BinocularMarkTcpProtocol::onHeartbeatTimeout()
{
// Send a heartbeat message to all clients
QJsonObject heartbeatObj;
heartbeatObj["msg_type"] = "heartbeat";
heartbeatObj["timestamp"] = QDateTime::currentMSecsSinceEpoch();
QJsonDocument doc(heartbeatObj);
QByteArray jsonData = doc.toJson(QJsonDocument::Compact);
QByteArray frameData = buildFrame(jsonData);
if (m_pTcpServer != nullptr)
{
m_pTcpServer->SendAllData(frameData.data(), frameData.size());
}
}
void BinocularMarkTcpProtocol::tcpRecvCallback(const TCPClient* pClient, const char* pData, const unsigned int nLen)
{
if (s_pInstance != nullptr)
{
s_pInstance->handleReceivedData(pClient, pData, nLen);
}
}
void BinocularMarkTcpProtocol::tcpEventCallback(const TCPClient* pClient, TCPServerEventType eventType)
{
if (s_pInstance == nullptr)
return;
QString clientId = s_pInstance->generateClientId(pClient);
switch (eventType)
{
case TCP_EVENT_CLIENT_CONNECTED:
LOG_INFO("Client connected: %s\n", clientId.toStdString().c_str());
s_pInstance->m_clientBuffers[clientId].clear();
break;
case TCP_EVENT_CLIENT_DISCONNECTED:
LOG_INFO("Client disconnected: %s\n", clientId.toStdString().c_str());
s_pInstance->m_clientBuffers.remove(clientId);
break;
case TCP_EVENT_CLIENT_EXCEPTION:
LOG_WARN("Client exception: %s\n", clientId.toStdString().c_str());
s_pInstance->m_clientBuffers.remove(clientId);
break;
default:
break;
}
}
void BinocularMarkTcpProtocol::handleReceivedData(const TCPClient* pClient, const char* pData, unsigned int nLen)
{
QString clientId = generateClientId(pClient);
// Append the received data to this client's buffer
QByteArray receivedData(pData, nLen);
m_clientBuffers[clientId].append(receivedData);
// Parse data frames (handles coalesced and partial TCP packets)
QList<QByteArray> jsonDataList;
int frameCount = parseFrames(clientId, m_clientBuffers[clientId], jsonDataList);
// Handle each complete JSON message
for (const QByteArray& jsonData : jsonDataList)
{
handleJsonMessage(pClient, jsonData);
}
LOG_DEBUG("Received %u bytes from %s, parsed %d frames\n", nLen, pClient->m_szClientIP, frameCount);
}
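// Wire format produced by buildFrame() and consumed by parseFrames(). A sketch
// of one frame, assuming FRAME_HEADER / FRAME_TAIL are the literal byte
// sequences declared in BinocularMarkTcpProtocol.h:
//
//   [FRAME_HEADER: 8 bytes][length: 8 ASCII digits, zero-padded][JSON payload][FRAME_TAIL: 4 bytes]
//
// For a 291-byte JSON payload the length field is the string "00000291", so the
// complete frame occupies 8 + 8 + 291 + 4 = 311 bytes.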
QByteArray BinocularMarkTcpProtocol::buildFrame(const QByteArray& jsonData)
{
QByteArray frame;
// Frame header (8 bytes)
frame.append(FRAME_HEADER, FRAME_HEADER_SIZE);
// Write the data length as an 8-character ASCII string (zero-padded 64-bit unsigned value)
quint64 dataLength = jsonData.size();
char lengthStr[9]; // 8 digits + '\0'
#ifdef _WIN32
sprintf_s(lengthStr, 9, "%08llu", dataLength);
#else
snprintf(lengthStr, 9, "%08llu", dataLength);
#endif
frame.append(lengthStr, FRAME_LENGTH_SIZE);
// JSON payload
frame.append(jsonData);
// Frame tail (4 bytes)
frame.append(FRAME_TAIL, FRAME_TAIL_SIZE);
return frame;
}
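// Resynchronization policy of parseFrames(): scan the per-client buffer for
// FRAME_HEADER, drop any leading garbage, validate the 8-digit ASCII length
// field, and verify FRAME_TAIL at the expected offset. On a bad length or a
// tail mismatch the current header is discarded and scanning continues;
// incomplete frames stay buffered until more data arrives.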
int BinocularMarkTcpProtocol::parseFrames(const QString& clientId, const QByteArray& data, QList<QByteArray>& outJsonData)
{
QByteArray& buffer = m_clientBuffers[clientId];
int frameCount = 0;
while (true)
{
// Make sure the buffer holds at least a frame header plus a length field
if (buffer.size() < FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE)
break;
// Locate the frame header
int headerPos = buffer.indexOf(FRAME_HEADER, 0);
if (headerPos < 0)
{
// No frame header found; discard the buffered data
buffer.clear();
break;
}
// If the header is not at the start of the buffer, drop the bytes before it
if (headerPos > 0)
{
buffer.remove(0, headerPos);
}
// Check whether the length field is complete
if (buffer.size() < FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE)
break;
// Read the data length (8-byte ASCII string, e.g. "00001234")
QByteArray lengthStr = buffer.mid(FRAME_HEADER_SIZE, FRAME_LENGTH_SIZE);
bool ok = false;
quint64 dataLength = lengthStr.toULongLong(&ok);
if (!ok)
{
LOG_ERROR("Invalid length string: %s\n", lengthStr.toStdString().c_str());
buffer.remove(0, FRAME_HEADER_SIZE);
continue;
}
// Compute the total frame length
quint64 frameLength = FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE + dataLength + FRAME_TAIL_SIZE;
// Check whether a complete frame has been received
if (buffer.size() < static_cast<int>(frameLength))
break;
// Verify the frame tail
int tailPos = FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE + dataLength;
if (buffer.mid(tailPos, FRAME_TAIL_SIZE) != QByteArray(FRAME_TAIL, FRAME_TAIL_SIZE))
{
// Tail mismatch: drop this header and keep scanning for the next one
buffer.remove(0, FRAME_HEADER_SIZE);
continue;
}
// Extract the JSON payload
QByteArray jsonData = buffer.mid(FRAME_HEADER_SIZE + FRAME_LENGTH_SIZE, dataLength);
outJsonData.append(jsonData);
// Remove the processed frame from the buffer
buffer.remove(0, frameLength);
frameCount++;
}
return frameCount;
}
MarkMessageType BinocularMarkTcpProtocol::parseMessageType(const QString& msgTypeStr)
{
if (msgTypeStr == "mark_result")
return MarkMessageType::MARK_RESULT;
else if (msgTypeStr == "heartbeat")
return MarkMessageType::HEARTBEAT;
else if (msgTypeStr == "heartbeat_ack")
return MarkMessageType::HEARTBEAT_ACK;
else if (msgTypeStr == "cmd_trigger")
return MarkMessageType::CMD_TRIGGER;
else if (msgTypeStr == "cmd_single_detection")
return MarkMessageType::CMD_SINGLE_DETECTION;
else if (msgTypeStr == "cmd_single_image")
return MarkMessageType::CMD_SINGLE_IMAGE;
else if (msgTypeStr == "cmd_start_work")
return MarkMessageType::CMD_START_WORK;
else if (msgTypeStr == "cmd_stop_work")
return MarkMessageType::CMD_STOP_WORK;
else if (msgTypeStr == "cmd_start_continuous_image")
return MarkMessageType::CMD_START_CONTINUOUS_IMAGE;
else if (msgTypeStr == "cmd_stop_continuous_image")
return MarkMessageType::CMD_STOP_CONTINUOUS_IMAGE;
else if (msgTypeStr == "cmd_set_calibration")
return MarkMessageType::CMD_SET_CALIBRATION;
else if (msgTypeStr == "cmd_set_exposure_time")
return MarkMessageType::CMD_SET_EXPOSURE_TIME;
else if (msgTypeStr == "cmd_set_gain")
return MarkMessageType::CMD_SET_GAIN;
else if (msgTypeStr == "cmd_get_camera_info")
return MarkMessageType::CMD_GET_CAMERA_INFO;
else if (msgTypeStr == "cmd_response")
return MarkMessageType::CMD_RESPONSE;
else
return MarkMessageType::UNKNOWN;
}
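// Example of an inbound command payload handled below. Field names match the
// handlers in this file; the value and the units of "exposure_time" are
// illustrative only (whatever the camera backend expects):
//
//   {"msg_type":"cmd_set_exposure_time","camera":"left","exposure_time":10000.0}
//
// Omitting "camera" applies the setting to both cameras.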
void BinocularMarkTcpProtocol::handleJsonMessage(const TCPClient* pClient, const QByteArray& jsonData)
{
QJsonParseError parseError;
QJsonDocument doc = QJsonDocument::fromJson(jsonData, &parseError);
if (parseError.error != QJsonParseError::NoError)
{
LOG_ERROR("Failed to parse JSON: %s\n", parseError.errorString().toStdString().c_str());
return;
}
if (!doc.isObject())
{
LOG_ERROR("JSON is not an object\n");
return;
}
QJsonObject jsonObj = doc.object();
// Parse the message type
QString msgTypeStr = jsonObj["msg_type"].toString();
LOG_DEBUG("Received msg_type: %s\n", msgTypeStr.toStdString().c_str());
MarkMessageType msgType = parseMessageType(msgTypeStr);
// Dispatch by message type
switch (msgType)
{
case MarkMessageType::HEARTBEAT:
handleHeartbeat(pClient, jsonObj);
break;
case MarkMessageType::CMD_TRIGGER:
handleTriggerCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_SINGLE_DETECTION:
handleSingleDetectionCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_SINGLE_IMAGE:
handleSingleImageCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_START_WORK:
handleStartWorkCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_STOP_WORK:
handleStopWorkCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_START_CONTINUOUS_IMAGE:
handleStartContinuousImageCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_STOP_CONTINUOUS_IMAGE:
handleStopContinuousImageCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_SET_CALIBRATION:
handleSetCalibrationCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_GET_CALIBRATION:
handleGetCalibrationCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_SET_EXPOSURE_TIME:
handleSetExposureTimeCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_SET_GAIN:
handleSetGainCommand(pClient, jsonObj);
break;
case MarkMessageType::CMD_GET_CAMERA_INFO:
handleGetCameraInfoCommand(pClient, jsonObj);
break;
case MarkMessageType::HEARTBEAT_ACK:
// Heartbeat acknowledgement; nothing to do
break;
default:
LOG_WARN("Unknown message type: %s\n", msgTypeStr.toStdString().c_str());
break;
}
}
void BinocularMarkTcpProtocol::handleHeartbeat(const TCPClient* pClient, const QJsonObject& jsonObj)
{
// Send a heartbeat acknowledgement
sendHeartbeatAck(pClient);
}
void BinocularMarkTcpProtocol::handleTriggerCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
// Trigger detection
emit triggerDetection();
// Send the command response
sendCommandResponse(pClient, "trigger", true, 0, "OK");
}
void BinocularMarkTcpProtocol::handleSingleDetectionCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
// Trigger a single detection
emit singleDetectionRequested(pClient);
// Send the command response
sendCommandResponse(pClient, "single_detection", true, 0, "OK");
}
void BinocularMarkTcpProtocol::handleSingleImageCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
// Trigger a single image capture
emit singleImageRequested(pClient);
// Send the command response
sendCommandResponse(pClient, "single_image", true, 0, "OK");
}
void BinocularMarkTcpProtocol::handleStartWorkCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
// Start continuous work
emit startWorkRequested();
// Send the command response
sendCommandResponse(pClient, "start_work", true, 0, "OK");
}
void BinocularMarkTcpProtocol::handleStopWorkCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
// Stop continuous work
emit stopWorkRequested();
// Send the command response
sendCommandResponse(pClient, "stop_work", true, 0, "OK");
}
void BinocularMarkTcpProtocol::handleStartContinuousImageCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
// Start the continuous image stream
emit startContinuousImageRequested();
// Send the command response
sendCommandResponse(pClient, "start_continuous_image", true, 0, "OK");
}
void BinocularMarkTcpProtocol::handleStopContinuousImageCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
// Stop the continuous image stream
emit stopContinuousImageRequested();
// Send the command response
sendCommandResponse(pClient, "stop_continuous_image", true, 0, "OK");
}
void BinocularMarkTcpProtocol::handleSetCalibrationCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
QString calibrationXml = jsonObj["calibration_xml"].toString();
if (calibrationXml.isEmpty()) {
sendCommandResponse(pClient, "set_calibration", false, -1, "Empty calibration data");
return;
}
// Emit a signal for the Presenter to handle
emit setCalibrationRequested(pClient, calibrationXml);
// Send the command response
sendCommandResponse(pClient, "set_calibration", true, 0, "OK");
}
void BinocularMarkTcpProtocol::handleGetCalibrationCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
emit getCalibrationRequested(pClient);
}
void BinocularMarkTcpProtocol::sendCalibrationMatrixResponse(const TCPClient* pClient, const QString& calibrationXml)
{
QJsonObject resultObj;
resultObj["msg_type"] = "calibration_matrix_response";
resultObj["timestamp"] = QDateTime::currentMSecsSinceEpoch();
resultObj["calibration_xml"] = calibrationXml;
QJsonDocument doc(resultObj);
QByteArray jsonData = doc.toJson(QJsonDocument::Compact);
QByteArray frameData = buildFrame(jsonData);
if (m_pTcpServer != nullptr) {
m_pTcpServer->SendData(pClient, frameData.data(), frameData.size());
}
}
void BinocularMarkTcpProtocol::handleSetExposureTimeCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
double exposureTime = jsonObj["exposure_time"].toDouble();
if (exposureTime <= 0) {
sendCommandResponse(pClient, "set_exposure_time", false, -1, "Invalid exposure time");
return;
}
// Check whether a specific camera was requested
QString camera = jsonObj["camera"].toString();
if (camera == "left") {
emit setLeftExposureTimeRequested(exposureTime);
LOG_INFO("Left camera exposure time set: %.2f\n", exposureTime);
} else if (camera == "right") {
emit setRightExposureTimeRequested(exposureTime);
LOG_INFO("Right camera exposure time set: %.2f\n", exposureTime);
} else {
// No camera specified; set both left and right cameras
emit setExposureTimeRequested(exposureTime);
LOG_INFO("Both cameras exposure time set: %.2f\n", exposureTime);
}
sendCommandResponse(pClient, "set_exposure_time", true, 0, "OK");
}
void BinocularMarkTcpProtocol::handleSetGainCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
double gain = jsonObj["gain"].toDouble();
if (gain <= 0) {
sendCommandResponse(pClient, "set_gain", false, -1, "Invalid gain");
return;
}
// Check whether a specific camera was requested
QString camera = jsonObj["camera"].toString();
if (camera == "left") {
emit setLeftGainRequested(gain);
LOG_INFO("Left camera gain set: %.2f\n", gain);
} else if (camera == "right") {
emit setRightGainRequested(gain);
LOG_INFO("Right camera gain set: %.2f\n", gain);
} else {
// No camera specified; set both left and right cameras
emit setGainRequested(gain);
LOG_INFO("Both cameras gain set: %.2f\n", gain);
}
sendCommandResponse(pClient, "set_gain", true, 0, "OK");
}
void BinocularMarkTcpProtocol::sendHeartbeatAck(const TCPClient* pClient)
{
QJsonObject ackObj;
ackObj["msg_type"] = "heartbeat_ack";
ackObj["timestamp"] = QDateTime::currentMSecsSinceEpoch();
QJsonDocument doc(ackObj);
QByteArray jsonData = doc.toJson(QJsonDocument::Compact);
QByteArray frameData = buildFrame(jsonData);
if (m_pTcpServer != nullptr)
{
m_pTcpServer->SendData(pClient, frameData.data(), frameData.size());
}
}
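// Command handlers above acknowledge requests with a cmd_response frame of the
// following form (values shown here are illustrative; the timestamp is
// milliseconds since the epoch from QDateTime::currentMSecsSinceEpoch()):
//
//   {"msg_type":"cmd_response","cmd_type":"set_gain","result":true,
//    "error_code":0,"error_msg":"OK","timestamp":1734680292000}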
void BinocularMarkTcpProtocol::sendCommandResponse(const TCPClient* pClient, const QString& cmdType,
bool result, int errorCode, const QString& errorMsg)
{
QJsonObject responseObj;
responseObj["msg_type"] = "cmd_response";
responseObj["cmd_type"] = cmdType;
responseObj["result"] = result;
responseObj["error_code"] = errorCode;
responseObj["error_msg"] = errorMsg;
responseObj["timestamp"] = QDateTime::currentMSecsSinceEpoch();
QJsonDocument doc(responseObj);
QByteArray jsonData = doc.toJson(QJsonDocument::Compact);
QByteArray frameData = buildFrame(jsonData);
if (m_pTcpServer != nullptr)
{
m_pTcpServer->SendData(pClient, frameData.data(), frameData.size());
}
}
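// mark_result frames are broadcast to every connected client. A sketch of the
// payload built below; left_image/right_image are Base64-encoded JPEG and are
// present only when the corresponding cv::Mat is non-empty:
//
//   {"msg_type":"mark_result","timestamp":...,"error_code":0,"mark_count":2,
//    "marks":[{"mark_id":1,"x":0.1,"y":0.2,"z":1.5}, ...],
//    "left_image":"<base64 JPEG>","right_image":"<base64 JPEG>"}
//
// If the previous frame is still being encoded or sent, the new frame is
// dropped (see m_bIsProcessingFrame below).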
void BinocularMarkTcpProtocol::sendMarkResult(const std::vector<SWD_charuco3DMark>& marks,
const cv::Mat& leftImage,
const cv::Mat& rightImage,
int errorCode)
{
// Check for image data: no marks but at least one image indicates continuous image-stream mode
bool isContinuousImageMode = marks.empty() && (!leftImage.empty() || !rightImage.empty());
// Frame-drop policy: if the previous frame is still being processed, skip this send
bool expected = false;
if (!m_bIsProcessingFrame.compare_exchange_strong(expected, true)) {
return; // Drop the frame silently, without logging
}
QJsonObject resultObj;
resultObj["msg_type"] = "mark_result";
resultObj["timestamp"] = QDateTime::currentMSecsSinceEpoch();
resultObj["error_code"] = errorCode;
resultObj["mark_count"] = static_cast<int>(marks.size());
// Add mark data
QJsonArray marksArray;
for (const auto& mark : marks)
{
QJsonObject markObj;
markObj["mark_id"] = mark.markID;
markObj["x"] = mark.mark3D.x;
markObj["y"] = mark.mark3D.y;
markObj["z"] = mark.mark3D.z;
marksArray.append(markObj);
}
resultObj["marks"] = marksArray;
// Encode the images in parallel
std::future<QString> leftFuture;
std::future<QString> rightFuture;
if (!leftImage.empty())
{
leftFuture = std::async(std::launch::async, [this, leftImage]() {
return imageToBase64(leftImage);
});
}
if (!rightImage.empty())
{
rightFuture = std::async(std::launch::async, [this, rightImage]() {
return imageToBase64(rightImage);
});
}
// Collect the encoding results
if (!leftImage.empty())
{
resultObj["left_image"] = leftFuture.get();
}
if (!rightImage.empty())
{
resultObj["right_image"] = rightFuture.get();
}
QJsonDocument doc(resultObj);
QByteArray jsonData = doc.toJson(QJsonDocument::Compact);
QByteArray frameData = buildFrame(jsonData);
// Send to all clients
if (m_pTcpServer != nullptr)
{
m_pTcpServer->SendAllData(frameData.data(), frameData.size());
}
// Processing finished; clear the flag
m_bIsProcessingFrame = false;
LOG_INFO("Sent mark result, mark_count: %zu, error_code: %d\n", marks.size(), errorCode);
}
void BinocularMarkTcpProtocol::sendSingleDetectionResult(const TCPClient* pClient,
const std::vector<SWD_charuco3DMark>& marks,
const cv::Mat& leftImage,
const cv::Mat& rightImage,
int errorCode)
{
QJsonObject resultObj;
resultObj["msg_type"] = "single_detection_result";
resultObj["timestamp"] = QDateTime::currentMSecsSinceEpoch();
resultObj["error_code"] = errorCode;
resultObj["mark_count"] = static_cast<int>(marks.size());
// Add mark data
QJsonArray marksArray;
for (const auto& mark : marks)
{
QJsonObject markObj;
markObj["mark_id"] = mark.markID;
markObj["x"] = mark.mark3D.x;
markObj["y"] = mark.mark3D.y;
markObj["z"] = mark.mark3D.z;
marksArray.append(markObj);
}
resultObj["marks"] = marksArray;
// Add Base64-encoded images
if (!leftImage.empty())
{
resultObj["left_image"] = imageToBase64(leftImage);
}
if (!rightImage.empty())
{
resultObj["right_image"] = imageToBase64(rightImage);
}
QJsonDocument doc(resultObj);
QByteArray jsonData = doc.toJson(QJsonDocument::Compact);
QByteArray frameData = buildFrame(jsonData);
// Send to the requesting client
if (m_pTcpServer != nullptr && pClient != nullptr)
{
m_pTcpServer->SendData(pClient, frameData.data(), frameData.size());
}
LOG_INFO("Sent single detection result, mark_count: %zu, error_code: %d\n", marks.size(), errorCode);
}
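// image_data frames carry raw camera images (no detection results) and are
// sent only to the requesting client:
//
//   {"msg_type":"image_data","timestamp":...,
//    "left_image":"<base64 JPEG>","right_image":"<base64 JPEG>"}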
void BinocularMarkTcpProtocol::sendImageData(const TCPClient* pClient, const cv::Mat& leftImage, const cv::Mat& rightImage)
{
QJsonObject resultObj;
resultObj["msg_type"] = "image_data";
resultObj["timestamp"] = QDateTime::currentMSecsSinceEpoch();
// Encode the images in parallel
std::future<QString> leftFuture;
std::future<QString> rightFuture;
bool hasLeft = !leftImage.empty();
bool hasRight = !rightImage.empty();
if (hasLeft)
{
leftFuture = std::async(std::launch::async, [this, &leftImage]() {
return imageToBase64(leftImage);
});
}
if (hasRight)
{
rightFuture = std::async(std::launch::async, [this, &rightImage]() {
return imageToBase64(rightImage);
});
}
// Collect the encoding results
if (hasLeft)
{
resultObj["left_image"] = leftFuture.get();
}
if (hasRight)
{
resultObj["right_image"] = rightFuture.get();
}
QJsonDocument doc(resultObj);
QByteArray jsonData = doc.toJson(QJsonDocument::Compact);
QByteArray frameData = buildFrame(jsonData);
// Send to the requesting client
if (m_pTcpServer != nullptr && pClient != nullptr)
{
m_pTcpServer->SendData(pClient, frameData.data(), frameData.size());
}
LOG_INFO("Sent image data\n");
}
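// Images are JPEG-compressed (quality 70) and then Base64-encoded as plain
// strings inside the JSON payload. A minimal sketch of how a Qt/OpenCV client
// might decode such a field (illustrative only, not part of this module):
//
//   QByteArray jpegBytes = QByteArray::fromBase64(base64Value.toUtf8());
//   std::vector<uchar> buf(jpegBytes.begin(), jpegBytes.end());
//   cv::Mat image = cv::imdecode(buf, cv::IMREAD_COLOR);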
QString BinocularMarkTcpProtocol::imageToBase64(const cv::Mat& image)
{
// Encode the cv::Mat as JPEG (quality 70) to reduce the payload size
std::vector<uchar> buf;
std::vector<int> params = {cv::IMWRITE_JPEG_QUALITY, 70};
cv::imencode(".jpg", image, buf, params);
// Convert to Base64
QByteArray ba(reinterpret_cast<const char*>(buf.data()), buf.size());
return ba.toBase64();
}
QString BinocularMarkTcpProtocol::generateClientId(const TCPClient* pClient)
{
return QString::number(reinterpret_cast<qintptr>(pClient));
}
void BinocularMarkTcpProtocol::handleGetCameraInfoCommand(const TCPClient* pClient, const QJsonObject& jsonObj)
{
QString camera = jsonObj["camera"].toString();
if (camera != "left" && camera != "right") {
sendCommandResponse(pClient, "get_camera_info", false, -1, "Invalid camera parameter (must be 'left' or 'right')");
return;
}
// Trigger the camera-info request
emit getCameraInfoRequested(pClient, camera);
LOG_INFO("Camera info requested: %s\n", camera.toStdString().c_str());
}
void BinocularMarkTcpProtocol::sendCameraInfoResponse(const TCPClient* pClient,
const QString& camera,
const QString& serialNumber,
const QString& modelName,
const QString& displayName,
double exposureTime,
double gain)
{
QJsonObject responseObj;
responseObj["msg_type"] = "camera_info_response";
responseObj["camera"] = camera;
responseObj["serial_number"] = serialNumber;
responseObj["model_name"] = modelName;
responseObj["display_name"] = displayName;
responseObj["exposure_time"] = exposureTime;
responseObj["gain"] = gain;
responseObj["timestamp"] = QDateTime::currentMSecsSinceEpoch();
QJsonDocument doc(responseObj);
QByteArray jsonData = doc.toJson(QJsonDocument::Compact);
QByteArray frameData = buildFrame(jsonData);
if (m_pTcpServer != nullptr && pClient != nullptr)
{
m_pTcpServer->SendData(pClient, frameData.data(), frameData.size());
}
LOG_INFO("Sent camera info response: %s, SN=%s, Model=%s, Exposure=%.2f, Gain=%.2f\n",
camera.toStdString().c_str(),
serialNumber.toStdString().c_str(),
modelName.toStdString().c_str(),
exposureTime,
gain);
}