feat: 重新规划ui布局;增加左右红外以及rgb相机可视化

This commit is contained in:
2026-01-21 15:36:47 +08:00
parent 8b07397b5b
commit 93c7c1a86b
10 changed files with 1287 additions and 352 deletions

View File

@@ -6,16 +6,8 @@
GVSPParser::GVSPParser(QObject *parent)
: QObject(parent)
, m_isReceiving(false)
, m_dataType(0)
, m_currentBlockId(0)
, m_expectedSize(0)
, m_receivedSize(0)
, m_imageWidth(0)
, m_imageHeight(0)
, m_pixelFormat(0)
, m_lastBlockId(0)
, m_packetCount(0)
, m_imageSequence(0)
{
m_imageProcessingCount.storeRelaxed(0);
}
@@ -26,13 +18,12 @@ GVSPParser::~GVSPParser()
void GVSPParser::reset()
{
m_isReceiving = false;
m_dataType = 0;
m_currentBlockId = 0;
m_dataBuffer.clear();
m_expectedSize = 0;
m_receivedSize = 0;
m_packetCount = 0;
m_leftIRState = StreamState();
m_rightIRState = StreamState();
m_rgbState = StreamState();
m_depthState = StreamState();
m_pointCloudState = StreamState();
m_imageSequence = 0;
}
void GVSPParser::parsePacket(const QByteArray &packet)
@@ -85,16 +76,12 @@ void GVSPParser::handleLeaderPacket(const uint8_t *data, size_t size)
}
const GVSPPacketHeader *header = reinterpret_cast<const GVSPPacketHeader*>(data);
m_currentBlockId = ntohs(header->block_id);
uint32_t blockId = ntohs(header->block_id);
// Check payload type
const uint16_t *payload_type_ptr = reinterpret_cast<const uint16_t*>(data + sizeof(GVSPPacketHeader) + 2);
uint16_t payload_type = ntohs(*payload_type_ptr);
// qDebug() << "[GVSPParser] Leader packet: BlockID=" << m_currentBlockId
// << "PayloadType=0x" << Qt::hex << payload_type << Qt::dec
// << "Size=" << size;
if (payload_type == PAYLOAD_TYPE_IMAGE) {
// Image data leader
if (size < sizeof(GVSPPacketHeader) + sizeof(GVSPImageDataLeader)) {
@@ -102,64 +89,102 @@ void GVSPParser::handleLeaderPacket(const uint8_t *data, size_t size)
return;
}
const GVSPImageDataLeader *leader = reinterpret_cast<const GVSPImageDataLeader*>(data + sizeof(GVSPPacketHeader));
m_dataType = 1;
m_imageWidth = ntohl(leader->size_x);
m_imageHeight = ntohl(leader->size_y);
m_pixelFormat = ntohl(leader->pixel_format);
m_expectedSize = m_imageWidth * m_imageHeight * 2; // 12-bit packed in 16-bit
m_dataBuffer.clear();
m_dataBuffer.reserve(m_expectedSize);
m_receivedSize = 0;
m_isReceiving = true;
m_packetCount = 0;
// 注释掉频繁的日志输出
// qDebug() << "Image Leader: Block" << m_currentBlockId
// << "Size:" << m_imageWidth << "x" << m_imageHeight;
uint32_t imageWidth = ntohl(leader->size_x);
uint32_t imageHeight = ntohl(leader->size_y);
uint32_t pixelFormat = ntohl(leader->pixel_format);
// 根据像素格式选择对应的状态
StreamState *state = nullptr;
if (pixelFormat == PIXEL_FORMAT_MONO16_LEFT) {
state = &m_leftIRState;
} else if (pixelFormat == PIXEL_FORMAT_MONO16_RIGHT) {
state = &m_rightIRState;
} else if (pixelFormat == PIXEL_FORMAT_MJPEG) {
state = &m_rgbState;
} else if (pixelFormat == PIXEL_FORMAT_MONO16 || pixelFormat == PIXEL_FORMAT_12BIT_GRAY) {
// Legacy格式根据序号分配
int imageType = m_imageSequence % 2;
state = (imageType == 0) ? &m_leftIRState : &m_rightIRState;
}
if (state) {
state->blockId = blockId;
state->imageWidth = imageWidth;
state->imageHeight = imageHeight;
state->pixelFormat = pixelFormat;
// 根据pixel format设置期望大小
if (pixelFormat == PIXEL_FORMAT_MJPEG) {
// MJPEG是压缩格式实际大小未知设置为0表示动态接收
state->expectedSize = 0;
} else {
// 16-bit或12-bit灰度等固定格式
state->expectedSize = imageWidth * imageHeight * 2;
}
state->dataBuffer.clear();
if (state->expectedSize > 0) {
state->dataBuffer.reserve(state->expectedSize);
}
state->receivedSize = 0;
state->isReceiving = true;
state->packetCount = 0;
}
}
else if (payload_type == PAYLOAD_TYPE_BINARY) {
// Depth data leader
const GVSPBinaryDataLeader *leader = reinterpret_cast<const GVSPBinaryDataLeader*>(data + sizeof(GVSPPacketHeader));
m_dataType = 3;
m_expectedSize = ntohl(leader->file_size);
m_dataBuffer.clear();
m_dataBuffer.reserve(m_expectedSize);
m_receivedSize = 0;
m_isReceiving = true;
m_packetCount = 0;
// 注释掉频繁的日志输出
// qDebug() << "Depth Leader: Block" << m_currentBlockId
// << "Size:" << m_expectedSize << "bytes";
m_depthState.blockId = blockId;
m_depthState.expectedSize = ntohl(leader->file_size);
m_depthState.dataBuffer.clear();
m_depthState.dataBuffer.reserve(m_depthState.expectedSize);
m_depthState.receivedSize = 0;
m_depthState.isReceiving = true;
m_depthState.packetCount = 0;
}
else if (payload_type == PAYLOAD_TYPE_POINTCLOUD) {
// Point cloud data leader (vendor-specific 0x8000)
const GVSPPointCloudDataLeader *leader = reinterpret_cast<const GVSPPointCloudDataLeader*>(data + sizeof(GVSPPacketHeader));
m_dataType = 4; // 新类型:点云数据
m_expectedSize = ntohl(leader->data_size);
m_dataBuffer.clear();
m_dataBuffer.reserve(m_expectedSize);
m_receivedSize = 0;
m_isReceiving = true;
m_packetCount = 0;
// qDebug() << "[PointCloud Leader] Block:" << m_currentBlockId
// << "Expected Size:" << m_expectedSize << "bytes";
m_pointCloudState.blockId = blockId;
m_pointCloudState.expectedSize = ntohl(leader->data_size);
m_pointCloudState.dataBuffer.clear();
m_pointCloudState.dataBuffer.reserve(m_pointCloudState.expectedSize);
m_pointCloudState.receivedSize = 0;
m_pointCloudState.isReceiving = true;
m_pointCloudState.packetCount = 0;
}
}
void GVSPParser::handlePayloadPacket(const uint8_t *data, size_t size)
{
if (!m_isReceiving) {
if (size < sizeof(GVSPPacketHeader)) {
return;
}
const GVSPPacketHeader *header = reinterpret_cast<const GVSPPacketHeader*>(data);
uint32_t blockId = ntohs(header->block_id);
// 查找匹配的状态
StreamState *state = nullptr;
if (m_leftIRState.isReceiving && m_leftIRState.blockId == blockId) {
state = &m_leftIRState;
} else if (m_rightIRState.isReceiving && m_rightIRState.blockId == blockId) {
state = &m_rightIRState;
} else if (m_rgbState.isReceiving && m_rgbState.blockId == blockId) {
state = &m_rgbState;
} else if (m_depthState.isReceiving && m_depthState.blockId == blockId) {
state = &m_depthState;
} else if (m_pointCloudState.isReceiving && m_pointCloudState.blockId == blockId) {
state = &m_pointCloudState;
}
if (!state) {
return; // 没有匹配的接收状态
}
if (size <= sizeof(GVSPPacketHeader)) {
return;
}
@@ -169,58 +194,130 @@ void GVSPParser::handlePayloadPacket(const uint8_t *data, size_t size)
size_t payload_size = size - sizeof(GVSPPacketHeader);
// Append to buffer
m_dataBuffer.append(reinterpret_cast<const char*>(payload), payload_size);
m_receivedSize += payload_size;
m_packetCount++;
state->dataBuffer.append(reinterpret_cast<const char*>(payload), payload_size);
state->receivedSize += payload_size;
state->packetCount++;
}
void GVSPParser::handleTrailerPacket(const uint8_t *data, size_t size)
{
if (!m_isReceiving) {
if (size < sizeof(GVSPPacketHeader)) {
return;
}
// 注释掉频繁的日志输出
// qDebug() << "Trailer received: Block" << m_currentBlockId
// << "Received" << m_receivedSize << "/" << m_expectedSize << "bytes"
// << "Packets:" << m_packetCount;
const GVSPPacketHeader *header = reinterpret_cast<const GVSPPacketHeader*>(data);
uint32_t blockId = ntohs(header->block_id);
// Process complete data
if (m_dataType == 1) {
processImageData();
} else if (m_dataType == 3) {
processDepthData();
} else if (m_dataType == 4) {
processPointCloudData();
// 查找匹配的状态并处理
if (m_leftIRState.isReceiving && m_leftIRState.blockId == blockId) {
processImageData(&m_leftIRState);
m_leftIRState.isReceiving = false;
m_lastBlockId = blockId;
} else if (m_rightIRState.isReceiving && m_rightIRState.blockId == blockId) {
processImageData(&m_rightIRState);
m_rightIRState.isReceiving = false;
m_lastBlockId = blockId;
} else if (m_rgbState.isReceiving && m_rgbState.blockId == blockId) {
processImageData(&m_rgbState);
m_rgbState.isReceiving = false;
m_lastBlockId = blockId;
} else if (m_depthState.isReceiving && m_depthState.blockId == blockId) {
processDepthData(&m_depthState);
m_depthState.isReceiving = false;
m_lastBlockId = blockId;
} else if (m_pointCloudState.isReceiving && m_pointCloudState.blockId == blockId) {
processPointCloudData(&m_pointCloudState);
m_pointCloudState.isReceiving = false;
m_lastBlockId = blockId;
}
// Reset state
m_isReceiving = false;
m_lastBlockId = m_currentBlockId;
}
void GVSPParser::processImageData()
void GVSPParser::processImageData(GVSPParser::StreamState *state)
{
if (m_dataBuffer.size() < m_expectedSize) {
if (!state) return;
// 处理MJPEG格式RGB相机
if (state->pixelFormat == PIXEL_FORMAT_MJPEG) {
// RGB MJPEG
emit rgbImageReceived(state->dataBuffer, state->blockId);
m_imageSequence++;
return;
}
// 处理Mono16格式左右红外相机原始16位数据
// 使用像素格式直接区分左右,不依赖接收顺序
if (state->pixelFormat == PIXEL_FORMAT_MONO16_LEFT) {
// 检查数据大小
if (state->dataBuffer.size() < state->expectedSize) {
return;
}
// 左红外原始数据
emit leftImageReceived(state->dataBuffer, state->blockId);
m_imageSequence++;
return;
}
if (state->pixelFormat == PIXEL_FORMAT_MONO16_RIGHT) {
// 检查数据大小
if (state->dataBuffer.size() < state->expectedSize) {
return;
}
// 右红外原始数据
emit rightImageReceived(state->dataBuffer, state->blockId);
m_imageSequence++;
return;
}
// 兼容旧版本使用序号区分legacy
if (state->pixelFormat == PIXEL_FORMAT_MONO16) {
// 检查数据大小
if (state->dataBuffer.size() < state->expectedSize) {
return;
}
// 根据图像序号区分:偶数=左红外,奇数=右红外
int imageType = m_imageSequence % 2;
if (imageType == 0) {
// 左红外原始数据
emit leftImageReceived(state->dataBuffer, state->blockId);
} else {
// 右红外原始数据
emit rightImageReceived(state->dataBuffer, state->blockId);
}
m_imageSequence++;
return;
}
// 处理12-bit灰度格式 - 固定大小,需要检查
if (state->dataBuffer.size() < state->expectedSize) {
return;
}
// 处理12-bit灰度图像左右红外相机
if (state->pixelFormat != PIXEL_FORMAT_12BIT_GRAY) {
return;
}
// 节流机制如果已有3个或更多图像在处理中跳过当前帧
if (m_imageProcessingCount.loadAcquire() >= 3) {
m_imageSequence++;
return;
}
// 增加处理计数
m_imageProcessingCount.ref();
// 复制数据到局部变量,避免在异步处理时数据被覆盖
QByteArray dataCopy = m_dataBuffer;
uint32_t blockId = m_currentBlockId;
size_t width = m_imageWidth;
size_t height = m_imageHeight;
// 复制数据到局部变量
QByteArray dataCopy = state->dataBuffer;
uint32_t blockId = state->blockId;
size_t width = state->imageWidth;
size_t height = state->imageHeight;
int imageSeq = m_imageSequence;
// 使用QtConcurrent在后台线程处理图像数据
QtConcurrent::run([this, dataCopy, blockId, width, height]() {
QtConcurrent::run([this, dataCopy, blockId, width, height, imageSeq]() {
// Convert 16-bit depth data to 8-bit grayscale for display
const uint16_t *src = reinterpret_cast<const uint16_t*>(dataCopy.constData());
QImage image(width, height, QImage::Format_Grayscale8);
@@ -238,48 +335,49 @@ void GVSPParser::processImageData()
}
}
// 归一化并水平翻转(修正左右镜像
// 归一化(不翻转,保持原始方向
uint8_t *dst = image.bits();
float scale = (maxVal > minVal) ? (255.0f / (maxVal - minVal)) : 0.0f;
for (size_t row = 0; row < height; row++) {
for (size_t col = 0; col < width; col++) {
size_t srcIdx = row * width + col;
size_t dstIdx = row * width + (width - 1 - col); // 水平翻转
uint16_t val = src[srcIdx];
dst[dstIdx] = (val == 0) ? 0 : static_cast<uint8_t>((val - minVal) * scale);
size_t idx = row * width + col;
uint16_t val = src[idx];
dst[idx] = (val == 0) ? 0 : static_cast<uint8_t>((val - minVal) * scale);
}
}
// 注意此代码路径已废弃下位机现在发送JPEG格式
// 仅保留兼容旧代码的信号
emit imageReceived(image, blockId);
// 减少处理计数
m_imageProcessingCount.deref();
});
// 增加图像序号
m_imageSequence++;
}
void GVSPParser::processDepthData()
void GVSPParser::processDepthData(GVSPParser::StreamState *state)
{
if (m_dataBuffer.size() < m_expectedSize) {
// 注释掉频繁的警告日志
// qDebug() << "Warning: Incomplete depth data" << m_dataBuffer.size() << "/" << m_expectedSize;
if (!state) return;
if (state->dataBuffer.size() < state->expectedSize) {
return;
}
emit depthDataReceived(m_dataBuffer, m_currentBlockId);
emit depthDataReceived(state->dataBuffer, state->blockId);
}
void GVSPParser::processPointCloudData()
void GVSPParser::processPointCloudData(GVSPParser::StreamState *state)
{
// qDebug() << "[PointCloud] Received:" << m_dataBuffer.size()
// << "bytes, Expected:" << m_expectedSize << "bytes";
if (!state) return;
if (m_dataBuffer.size() < m_expectedSize) {
qDebug() << "[PointCloud] ERROR: Data incomplete, skipping!";
if (state->dataBuffer.size() < state->expectedSize) {
return;
}
// qDebug() << "[PointCloud] Data complete, emitting pointCloudDataReceived signal...";
// 点云数据直接发送,格式为 short 数组 (x, y, z, x, y, z, ...)
emit pointCloudDataReceived(m_dataBuffer, m_currentBlockId);
emit pointCloudDataReceived(state->dataBuffer, state->blockId);
}