feat: 重新规划ui布局;增加左右红外以及rgb相机可视化

This commit is contained in:
2026-01-21 15:36:47 +08:00
parent 8b07397b5b
commit 93c7c1a86b
10 changed files with 1287 additions and 352 deletions

View File

@@ -148,7 +148,7 @@ C:\Program Files\D330Viewer\
- ✅ UDP网络通信GVSP协议解析
- ✅ 自动设备扫描和发现
- ✅ 相机连接管理(连接/断开)
- ✅ 命令发送START/STOP/ONCE
- ✅ 命令发送START/STOP
#### 可视化
- ✅ 实时深度图显示OpenCV垂直翻转校正
@@ -161,7 +161,7 @@ C:\Program Files\D330Viewer\
#### 用户界面
- ✅ Qt6 GUI主窗口三栏布局
- ✅ 相机控制面板（START/STOP/ONCE按钮）
- ✅ 相机控制面板（START/STOP按钮）
- ✅ 曝光时间调节滑块范围460-100000μs
- ✅ 网络配置IP地址、端口设置
- ✅ 连接状态指示
@@ -258,7 +258,6 @@ d330viewer/
5. **停止采集**
- 点击"停止"按钮停止采集
- 点击"单次"按钮采集单帧
### 点云交互操作

View File

@@ -19,10 +19,16 @@
// Image format
#define PIXEL_FORMAT_12BIT_GRAY 0x010C0001
#define PIXEL_FORMAT_MONO16 0x01100005 // Mono16 format (legacy)
#define PIXEL_FORMAT_MONO16_LEFT 0x01100006 // Mono16 format for left IR camera
#define PIXEL_FORMAT_MONO16_RIGHT 0x01100007 // Mono16 format for right IR camera
#define PIXEL_FORMAT_MJPEG 0x02180001 // MJPEG format for RGB camera
// Image dimensions
#define IMAGE_WIDTH 1224
#define IMAGE_HEIGHT 1024
#define RGB_WIDTH 1920
#define RGB_HEIGHT 1080
#pragma pack(push, 1)
@@ -105,39 +111,54 @@ public:
void reset();
signals:
void imageReceived(const QImage &image, uint32_t blockId);
void depthDataReceived(const QByteArray &depthData, uint32_t blockId);
void leftImageReceived(const QByteArray &jpegData, uint32_t blockId); // 左红外图像(JPEG)
void rightImageReceived(const QByteArray &jpegData, uint32_t blockId); // 右红外图像(JPEG)
void rgbImageReceived(const QByteArray &jpegData, uint32_t blockId); // RGB图像(MJPEG)
void pointCloudDataReceived(const QByteArray &cloudData, uint32_t blockId);
void parseError(const QString &error);
// 兼容旧代码
void imageReceived(const QImage &image, uint32_t blockId);
void depthDataReceived(const QByteArray &depthData, uint32_t blockId);
private:
// 每种数据流的独立接收状态
struct StreamState {
bool isReceiving;
uint32_t blockId;
QByteArray dataBuffer;
size_t expectedSize;
size_t receivedSize;
int packetCount;
// 图像特有信息
uint32_t imageWidth;
uint32_t imageHeight;
uint32_t pixelFormat;
StreamState() : isReceiving(false), blockId(0), expectedSize(0),
receivedSize(0), packetCount(0), imageWidth(0),
imageHeight(0), pixelFormat(0) {}
};
void handleLeaderPacket(const uint8_t *data, size_t size);
void handlePayloadPacket(const uint8_t *data, size_t size);
void handleTrailerPacket(const uint8_t *data, size_t size);
void processImageData();
void processDepthData();
void processPointCloudData();
void processImageData(StreamState *state);
void processDepthData(StreamState *state);
void processPointCloudData(StreamState *state);
private:
// Reception state
bool m_isReceiving;
int m_dataType; // 0=unknown, 1=image, 3=depth, 4=pointcloud
uint32_t m_currentBlockId;
// Data buffer
QByteArray m_dataBuffer;
size_t m_expectedSize;
size_t m_receivedSize;
// Image info
uint32_t m_imageWidth;
uint32_t m_imageHeight;
uint32_t m_pixelFormat;
// 为每种数据类型维护独立的状态
StreamState m_leftIRState; // 左红外
StreamState m_rightIRState; // 右红外
StreamState m_rgbState; // RGB
StreamState m_depthState; // 深度数据
StreamState m_pointCloudState; // 点云数据
// Statistics
uint32_t m_lastBlockId;
int m_packetCount;
int m_imageSequence;
// Async processing control
QAtomicInt m_imageProcessingCount;

View File

@@ -49,6 +49,10 @@ private:
std::vector<float> m_vertices;
int m_pointCount;
// 固定的点云中心点(避免抖动)
QVector3D m_fixedCenter;
bool m_centerInitialized;
// 相机参数
QMatrix4x4 m_projection;
QMatrix4x4 m_view;

View File

@@ -6,16 +6,8 @@
GVSPParser::GVSPParser(QObject *parent)
: QObject(parent)
, m_isReceiving(false)
, m_dataType(0)
, m_currentBlockId(0)
, m_expectedSize(0)
, m_receivedSize(0)
, m_imageWidth(0)
, m_imageHeight(0)
, m_pixelFormat(0)
, m_lastBlockId(0)
, m_packetCount(0)
, m_imageSequence(0)
{
m_imageProcessingCount.storeRelaxed(0);
}
@@ -26,13 +18,12 @@ GVSPParser::~GVSPParser()
void GVSPParser::reset()
{
m_isReceiving = false;
m_dataType = 0;
m_currentBlockId = 0;
m_dataBuffer.clear();
m_expectedSize = 0;
m_receivedSize = 0;
m_packetCount = 0;
m_leftIRState = StreamState();
m_rightIRState = StreamState();
m_rgbState = StreamState();
m_depthState = StreamState();
m_pointCloudState = StreamState();
m_imageSequence = 0;
}
void GVSPParser::parsePacket(const QByteArray &packet)
@@ -85,16 +76,12 @@ void GVSPParser::handleLeaderPacket(const uint8_t *data, size_t size)
}
const GVSPPacketHeader *header = reinterpret_cast<const GVSPPacketHeader*>(data);
m_currentBlockId = ntohs(header->block_id);
uint32_t blockId = ntohs(header->block_id);
// Check payload type
const uint16_t *payload_type_ptr = reinterpret_cast<const uint16_t*>(data + sizeof(GVSPPacketHeader) + 2);
uint16_t payload_type = ntohs(*payload_type_ptr);
// qDebug() << "[GVSPParser] Leader packet: BlockID=" << m_currentBlockId
// << "PayloadType=0x" << Qt::hex << payload_type << Qt::dec
// << "Size=" << size;
if (payload_type == PAYLOAD_TYPE_IMAGE) {
// Image data leader
if (size < sizeof(GVSPPacketHeader) + sizeof(GVSPImageDataLeader)) {
@@ -102,64 +89,102 @@ void GVSPParser::handleLeaderPacket(const uint8_t *data, size_t size)
return;
}
const GVSPImageDataLeader *leader = reinterpret_cast<const GVSPImageDataLeader*>(data + sizeof(GVSPPacketHeader));
m_dataType = 1;
m_imageWidth = ntohl(leader->size_x);
m_imageHeight = ntohl(leader->size_y);
m_pixelFormat = ntohl(leader->pixel_format);
m_expectedSize = m_imageWidth * m_imageHeight * 2; // 12-bit packed in 16-bit
m_dataBuffer.clear();
m_dataBuffer.reserve(m_expectedSize);
m_receivedSize = 0;
m_isReceiving = true;
m_packetCount = 0;
// 注释掉频繁的日志输出
// qDebug() << "Image Leader: Block" << m_currentBlockId
// << "Size:" << m_imageWidth << "x" << m_imageHeight;
uint32_t imageWidth = ntohl(leader->size_x);
uint32_t imageHeight = ntohl(leader->size_y);
uint32_t pixelFormat = ntohl(leader->pixel_format);
// 根据像素格式选择对应的状态
StreamState *state = nullptr;
if (pixelFormat == PIXEL_FORMAT_MONO16_LEFT) {
state = &m_leftIRState;
} else if (pixelFormat == PIXEL_FORMAT_MONO16_RIGHT) {
state = &m_rightIRState;
} else if (pixelFormat == PIXEL_FORMAT_MJPEG) {
state = &m_rgbState;
} else if (pixelFormat == PIXEL_FORMAT_MONO16 || pixelFormat == PIXEL_FORMAT_12BIT_GRAY) {
// Legacy格式根据序号分配
int imageType = m_imageSequence % 2;
state = (imageType == 0) ? &m_leftIRState : &m_rightIRState;
}
if (state) {
state->blockId = blockId;
state->imageWidth = imageWidth;
state->imageHeight = imageHeight;
state->pixelFormat = pixelFormat;
// 根据pixel format设置期望大小
if (pixelFormat == PIXEL_FORMAT_MJPEG) {
// MJPEG是压缩格式实际大小未知设置为0表示动态接收
state->expectedSize = 0;
} else {
// 16-bit或12-bit灰度等固定格式
state->expectedSize = imageWidth * imageHeight * 2;
}
state->dataBuffer.clear();
if (state->expectedSize > 0) {
state->dataBuffer.reserve(state->expectedSize);
}
state->receivedSize = 0;
state->isReceiving = true;
state->packetCount = 0;
}
}
else if (payload_type == PAYLOAD_TYPE_BINARY) {
// Depth data leader
const GVSPBinaryDataLeader *leader = reinterpret_cast<const GVSPBinaryDataLeader*>(data + sizeof(GVSPPacketHeader));
m_dataType = 3;
m_expectedSize = ntohl(leader->file_size);
m_dataBuffer.clear();
m_dataBuffer.reserve(m_expectedSize);
m_receivedSize = 0;
m_isReceiving = true;
m_packetCount = 0;
// 注释掉频繁的日志输出
// qDebug() << "Depth Leader: Block" << m_currentBlockId
// << "Size:" << m_expectedSize << "bytes";
m_depthState.blockId = blockId;
m_depthState.expectedSize = ntohl(leader->file_size);
m_depthState.dataBuffer.clear();
m_depthState.dataBuffer.reserve(m_depthState.expectedSize);
m_depthState.receivedSize = 0;
m_depthState.isReceiving = true;
m_depthState.packetCount = 0;
}
else if (payload_type == PAYLOAD_TYPE_POINTCLOUD) {
// Point cloud data leader (vendor-specific 0x8000)
const GVSPPointCloudDataLeader *leader = reinterpret_cast<const GVSPPointCloudDataLeader*>(data + sizeof(GVSPPacketHeader));
m_dataType = 4; // 新类型:点云数据
m_expectedSize = ntohl(leader->data_size);
m_dataBuffer.clear();
m_dataBuffer.reserve(m_expectedSize);
m_receivedSize = 0;
m_isReceiving = true;
m_packetCount = 0;
// qDebug() << "[PointCloud Leader] Block:" << m_currentBlockId
// << "Expected Size:" << m_expectedSize << "bytes";
m_pointCloudState.blockId = blockId;
m_pointCloudState.expectedSize = ntohl(leader->data_size);
m_pointCloudState.dataBuffer.clear();
m_pointCloudState.dataBuffer.reserve(m_pointCloudState.expectedSize);
m_pointCloudState.receivedSize = 0;
m_pointCloudState.isReceiving = true;
m_pointCloudState.packetCount = 0;
}
}
void GVSPParser::handlePayloadPacket(const uint8_t *data, size_t size)
{
if (!m_isReceiving) {
if (size < sizeof(GVSPPacketHeader)) {
return;
}
const GVSPPacketHeader *header = reinterpret_cast<const GVSPPacketHeader*>(data);
uint32_t blockId = ntohs(header->block_id);
// 查找匹配的状态
StreamState *state = nullptr;
if (m_leftIRState.isReceiving && m_leftIRState.blockId == blockId) {
state = &m_leftIRState;
} else if (m_rightIRState.isReceiving && m_rightIRState.blockId == blockId) {
state = &m_rightIRState;
} else if (m_rgbState.isReceiving && m_rgbState.blockId == blockId) {
state = &m_rgbState;
} else if (m_depthState.isReceiving && m_depthState.blockId == blockId) {
state = &m_depthState;
} else if (m_pointCloudState.isReceiving && m_pointCloudState.blockId == blockId) {
state = &m_pointCloudState;
}
if (!state) {
return; // 没有匹配的接收状态
}
if (size <= sizeof(GVSPPacketHeader)) {
return;
}
@@ -169,58 +194,130 @@ void GVSPParser::handlePayloadPacket(const uint8_t *data, size_t size)
size_t payload_size = size - sizeof(GVSPPacketHeader);
// Append to buffer
m_dataBuffer.append(reinterpret_cast<const char*>(payload), payload_size);
m_receivedSize += payload_size;
m_packetCount++;
state->dataBuffer.append(reinterpret_cast<const char*>(payload), payload_size);
state->receivedSize += payload_size;
state->packetCount++;
}
void GVSPParser::handleTrailerPacket(const uint8_t *data, size_t size)
{
if (!m_isReceiving) {
if (size < sizeof(GVSPPacketHeader)) {
return;
}
// 注释掉频繁的日志输出
// qDebug() << "Trailer received: Block" << m_currentBlockId
// << "Received" << m_receivedSize << "/" << m_expectedSize << "bytes"
// << "Packets:" << m_packetCount;
const GVSPPacketHeader *header = reinterpret_cast<const GVSPPacketHeader*>(data);
uint32_t blockId = ntohs(header->block_id);
// Process complete data
if (m_dataType == 1) {
processImageData();
} else if (m_dataType == 3) {
processDepthData();
} else if (m_dataType == 4) {
processPointCloudData();
// 查找匹配的状态并处理
if (m_leftIRState.isReceiving && m_leftIRState.blockId == blockId) {
processImageData(&m_leftIRState);
m_leftIRState.isReceiving = false;
m_lastBlockId = blockId;
} else if (m_rightIRState.isReceiving && m_rightIRState.blockId == blockId) {
processImageData(&m_rightIRState);
m_rightIRState.isReceiving = false;
m_lastBlockId = blockId;
} else if (m_rgbState.isReceiving && m_rgbState.blockId == blockId) {
processImageData(&m_rgbState);
m_rgbState.isReceiving = false;
m_lastBlockId = blockId;
} else if (m_depthState.isReceiving && m_depthState.blockId == blockId) {
processDepthData(&m_depthState);
m_depthState.isReceiving = false;
m_lastBlockId = blockId;
} else if (m_pointCloudState.isReceiving && m_pointCloudState.blockId == blockId) {
processPointCloudData(&m_pointCloudState);
m_pointCloudState.isReceiving = false;
m_lastBlockId = blockId;
}
// Reset state
m_isReceiving = false;
m_lastBlockId = m_currentBlockId;
}
void GVSPParser::processImageData()
void GVSPParser::processImageData(GVSPParser::StreamState *state)
{
if (m_dataBuffer.size() < m_expectedSize) {
if (!state) return;
// 处理MJPEG格式RGB相机
if (state->pixelFormat == PIXEL_FORMAT_MJPEG) {
// RGB MJPEG
emit rgbImageReceived(state->dataBuffer, state->blockId);
m_imageSequence++;
return;
}
// 处理Mono16格式左右红外相机原始16位数据
// 使用像素格式直接区分左右,不依赖接收顺序
if (state->pixelFormat == PIXEL_FORMAT_MONO16_LEFT) {
// 检查数据大小
if (state->dataBuffer.size() < state->expectedSize) {
return;
}
// 左红外原始数据
emit leftImageReceived(state->dataBuffer, state->blockId);
m_imageSequence++;
return;
}
if (state->pixelFormat == PIXEL_FORMAT_MONO16_RIGHT) {
// 检查数据大小
if (state->dataBuffer.size() < state->expectedSize) {
return;
}
// 右红外原始数据
emit rightImageReceived(state->dataBuffer, state->blockId);
m_imageSequence++;
return;
}
// 兼容旧版本使用序号区分legacy
if (state->pixelFormat == PIXEL_FORMAT_MONO16) {
// 检查数据大小
if (state->dataBuffer.size() < state->expectedSize) {
return;
}
// 根据图像序号区分:偶数=左红外,奇数=右红外
int imageType = m_imageSequence % 2;
if (imageType == 0) {
// 左红外原始数据
emit leftImageReceived(state->dataBuffer, state->blockId);
} else {
// 右红外原始数据
emit rightImageReceived(state->dataBuffer, state->blockId);
}
m_imageSequence++;
return;
}
// 处理12-bit灰度格式 - 固定大小,需要检查
if (state->dataBuffer.size() < state->expectedSize) {
return;
}
// 处理12-bit灰度图像左右红外相机
if (state->pixelFormat != PIXEL_FORMAT_12BIT_GRAY) {
return;
}
// 节流机制如果已有3个或更多图像在处理中跳过当前帧
if (m_imageProcessingCount.loadAcquire() >= 3) {
m_imageSequence++;
return;
}
// 增加处理计数
m_imageProcessingCount.ref();
// 复制数据到局部变量,避免在异步处理时数据被覆盖
QByteArray dataCopy = m_dataBuffer;
uint32_t blockId = m_currentBlockId;
size_t width = m_imageWidth;
size_t height = m_imageHeight;
// 复制数据到局部变量
QByteArray dataCopy = state->dataBuffer;
uint32_t blockId = state->blockId;
size_t width = state->imageWidth;
size_t height = state->imageHeight;
int imageSeq = m_imageSequence;
// 使用QtConcurrent在后台线程处理图像数据
QtConcurrent::run([this, dataCopy, blockId, width, height]() {
QtConcurrent::run([this, dataCopy, blockId, width, height, imageSeq]() {
// Convert 16-bit depth data to 8-bit grayscale for display
const uint16_t *src = reinterpret_cast<const uint16_t*>(dataCopy.constData());
QImage image(width, height, QImage::Format_Grayscale8);
@@ -238,48 +335,49 @@ void GVSPParser::processImageData()
}
}
// 归一化并水平翻转(修正左右镜像
// 归一化(不翻转,保持原始方向
uint8_t *dst = image.bits();
float scale = (maxVal > minVal) ? (255.0f / (maxVal - minVal)) : 0.0f;
for (size_t row = 0; row < height; row++) {
for (size_t col = 0; col < width; col++) {
size_t srcIdx = row * width + col;
size_t dstIdx = row * width + (width - 1 - col); // 水平翻转
uint16_t val = src[srcIdx];
dst[dstIdx] = (val == 0) ? 0 : static_cast<uint8_t>((val - minVal) * scale);
size_t idx = row * width + col;
uint16_t val = src[idx];
dst[idx] = (val == 0) ? 0 : static_cast<uint8_t>((val - minVal) * scale);
}
}
// 注意此代码路径已废弃下位机现在发送JPEG格式
// 仅保留兼容旧代码的信号
emit imageReceived(image, blockId);
// 减少处理计数
m_imageProcessingCount.deref();
});
// 增加图像序号
m_imageSequence++;
}
void GVSPParser::processDepthData()
void GVSPParser::processDepthData(GVSPParser::StreamState *state)
{
if (m_dataBuffer.size() < m_expectedSize) {
// 注释掉频繁的警告日志
// qDebug() << "Warning: Incomplete depth data" << m_dataBuffer.size() << "/" << m_expectedSize;
if (!state) return;
if (state->dataBuffer.size() < state->expectedSize) {
return;
}
emit depthDataReceived(m_dataBuffer, m_currentBlockId);
emit depthDataReceived(state->dataBuffer, state->blockId);
}
void GVSPParser::processPointCloudData()
void GVSPParser::processPointCloudData(GVSPParser::StreamState *state)
{
// qDebug() << "[PointCloud] Received:" << m_dataBuffer.size()
// << "bytes, Expected:" << m_expectedSize << "bytes";
if (!state) return;
if (m_dataBuffer.size() < m_expectedSize) {
qDebug() << "[PointCloud] ERROR: Data incomplete, skipping!";
if (state->dataBuffer.size() < state->expectedSize) {
return;
}
// qDebug() << "[PointCloud] Data complete, emitting pointCloudDataReceived signal...";
// 点云数据直接发送,格式为 short 数组 (x, y, z, x, y, z, ...)
emit pointCloudDataReceived(m_dataBuffer, m_currentBlockId);
emit pointCloudDataReceived(state->dataBuffer, state->blockId);
}

View File

@@ -18,6 +18,9 @@ NetworkManager::NetworkManager(QObject *parent)
// 连接GVSP解析器信号
connect(m_gvspParser, &GVSPParser::imageReceived, this, &NetworkManager::imageReceived);
connect(m_gvspParser, &GVSPParser::leftImageReceived, this, &NetworkManager::leftImageReceived);
connect(m_gvspParser, &GVSPParser::rightImageReceived, this, &NetworkManager::rightImageReceived);
connect(m_gvspParser, &GVSPParser::rgbImageReceived, this, &NetworkManager::rgbImageReceived);
connect(m_gvspParser, &GVSPParser::depthDataReceived, this, &NetworkManager::depthDataReceived);
connect(m_gvspParser, &GVSPParser::pointCloudDataReceived, this, &NetworkManager::pointCloudDataReceived);
}
@@ -54,8 +57,8 @@ bool NetworkManager::connectToCamera(const QString &ip, int controlPort, int dat
return false;
}
// 设置UDP接收缓冲区大小为64MB减少丢包)
m_dataSocket->setSocketOption(QAbstractSocket::ReceiveBufferSizeSocketOption, QVariant(64 * 1024 * 1024));
// 设置UDP接收缓冲区大小为256MB最大化减少丢包)
m_dataSocket->setSocketOption(QAbstractSocket::ReceiveBufferSizeSocketOption, QVariant(256 * 1024 * 1024));
qDebug() << "Successfully bound data port" << m_dataPort;
qDebug() << "Data socket state:" << m_dataSocket->state();
@@ -129,15 +132,66 @@ bool NetworkManager::sendStopCommand()
return sendCommand("STOP");
}
bool NetworkManager::sendOnceCommand()
{
return sendCommand("ONCE");
}
bool NetworkManager::sendExposureCommand(int exposureTime)
{
QString command = QString("EXPOSURE:%1").arg(exposureTime);
return sendCommand(command);
// 同时发送结构光曝光命令UART控制激光器单位μs
QString exposureCommand = QString("EXPOSURE:%1").arg(exposureTime);
bool success1 = sendCommand(exposureCommand);
// 同时发送红外相机曝光命令通过触发脉冲宽度控制单位μs
// 下位机会将此值用作manual_trigger_pulse()的脉冲宽度参数
// 脉冲宽度直接决定相机的实际曝光时间
int irExposure = exposureTime;
// 限制在有效范围内1000μs ~ 100000μs避免脉冲太短导致相机无法触发
if (irExposure < 1000) irExposure = 1000;
if (irExposure > 100000) irExposure = 100000;
QString irExposureCommand = QString("IR_EXPOSURE:%1").arg(irExposure);
bool success2 = sendCommand(irExposureCommand);
return success1 && success2;
}
// ========== 传输开关命令 ==========
bool NetworkManager::sendEnableLeftIR()
{
return sendCommand("ENABLE_LEFT");
}
bool NetworkManager::sendDisableLeftIR()
{
return sendCommand("DISABLE_LEFT");
}
bool NetworkManager::sendEnableRightIR()
{
return sendCommand("ENABLE_RIGHT");
}
bool NetworkManager::sendDisableRightIR()
{
return sendCommand("DISABLE_RIGHT");
}
bool NetworkManager::sendEnableRGB()
{
return sendCommand("ENABLE_RGB");
}
bool NetworkManager::sendDisableRGB()
{
return sendCommand("DISABLE_RGB");
}
bool NetworkManager::sendMonocularMode()
{
return sendCommand("MONOCULAR");
}
bool NetworkManager::sendBinocularMode()
{
return sendCommand("BINOCULAR");
}
// ========== 槽函数 ==========

View File

@@ -26,15 +26,29 @@ public:
bool sendCommand(const QString &command);
bool sendStartCommand();
bool sendStopCommand();
bool sendOnceCommand();
bool sendExposureCommand(int exposureTime);
// 发送传输开关命令
bool sendEnableLeftIR();
bool sendDisableLeftIR();
bool sendEnableRightIR();
bool sendDisableRightIR();
bool sendEnableRGB();
bool sendDisableRGB();
// 发送单目/双目模式切换命令
bool sendMonocularMode();
bool sendBinocularMode();
signals:
void connected();
void disconnected();
void errorOccurred(const QString &error);
void dataReceived(const QByteArray &data);
void imageReceived(const QImage &image, uint32_t blockId);
void leftImageReceived(const QByteArray &jpegData, uint32_t blockId);
void rightImageReceived(const QByteArray &jpegData, uint32_t blockId);
void rgbImageReceived(const QByteArray &jpegData, uint32_t blockId);
void depthDataReceived(const QByteArray &depthData, uint32_t blockId);
void pointCloudDataReceived(const QByteArray &cloudData, uint32_t blockId);

View File

@@ -284,30 +284,29 @@ void PointCloudProcessor::processPointCloudData(const QByteArray &cloudData, uin
const int16_t* cloudShort = reinterpret_cast<const int16_t*>(cloudData.constData());
if (isZOnly) {
// Z-only格式直接映射为平面点云(类似图像平面
// X对应列左右翻转Y对应行上下翻转Z是深度值
// qDebug() << "[PointCloud] Processing Z-only format as planar mapping";
// Z-only格式转换为正交投影(柱形
for (size_t i = 0; i < m_totalPoints; i++) {
int row = i / m_imageWidth;
int col = i % m_imageWidth;
// 直接映射X=列翻转Y=行翻转Z=深度
float x = static_cast<float>(m_imageWidth - 1 - col); // 左右翻转
float y = static_cast<float>(m_imageHeight - 1 - row); // 上下翻转
// 读取深度值(单位:毫米)
float z = static_cast<float>(cloudShort[i]) * m_zScale;
cloud->points[i].x = x;
cloud->points[i].y = y;
// 正交投影X、Y使用像素坐标Y轴翻转以修正镜像
cloud->points[i].x = static_cast<float>(col);
cloud->points[i].y = static_cast<float>(m_imageHeight - 1 - row);
cloud->points[i].z = z;
}
} else {
// XYZ格式完整的三维坐标
// qDebug() << "[PointCloud] Processing XYZ format";
// 转换为正交投影柱形使用像素坐标作为X、Y
for (size_t i = 0; i < m_totalPoints; i++) {
cloud->points[i].x = static_cast<float>(cloudShort[i * 3 + 0]) * m_zScale;
cloud->points[i].y = static_cast<float>(cloudShort[i * 3 + 1]) * m_zScale;
int row = i / m_imageWidth;
int col = i % m_imageWidth;
// 正交投影X、Y使用像素坐标Y轴翻转以修正镜像Z使用深度值
cloud->points[i].x = static_cast<float>(col);
cloud->points[i].y = static_cast<float>(m_imageHeight - 1 - row);
cloud->points[i].z = static_cast<float>(cloudShort[i * 3 + 2]) * m_zScale;
}
}

File diff suppressed because it is too large Load Diff

View File

@@ -35,7 +35,6 @@ private slots:
// 相机控制槽函数
void onStartClicked();
void onStopClicked();
void onOnceClicked();
void onExposureChanged(int value);
void onCaptureClicked();
void onBrowseSavePathClicked();
@@ -51,6 +50,9 @@ private slots:
// GVSP数据处理槽函数
void onImageReceived(const QImage &image, uint32_t blockId);
void onLeftImageReceived(const QByteArray &jpegData, uint32_t blockId);
void onRightImageReceived(const QByteArray &jpegData, uint32_t blockId);
void onRgbImageReceived(const QByteArray &jpegData, uint32_t blockId);
void onDepthDataReceived(const QByteArray &depthData, uint32_t blockId);
void onPointCloudDataReceived(const QByteArray &cloudData, uint32_t blockId);
void onPointCloudReady(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud, uint32_t blockId);
@@ -84,7 +86,9 @@ private:
bool savePointCloud(const QString &dir, const QString &baseName, const QString &format);
void performBackgroundSave(const QString &saveDir, const QString &baseName,
const QString &depthFormat, const QString &pointCloudFormat,
const QImage &image, pcl::PointCloud<pcl::PointXYZ>::Ptr cloud);
const QImage &image, pcl::PointCloud<pcl::PointXYZ>::Ptr cloud,
const QByteArray &leftIRData, const QByteArray &rightIRData,
const QByteArray &rgbData);
// 日志辅助函数
void addLog(const QString &message, const QString &level = "INFO");
@@ -102,28 +106,45 @@ private:
// 状态变量
bool m_isConnected;
bool m_autoSaveOnNextFrame;
// 当前帧数据(用于拍照)
QImage m_currentImage;
pcl::PointCloud<pcl::PointXYZ>::Ptr m_currentPointCloud;
uint32_t m_currentFrameId;
// 三个相机的原始JPEG数据用于保存完整分辨率图像
QByteArray m_currentLeftIRData;
QByteArray m_currentRightIRData;
QByteArray m_currentRgbData;
// UI控件指针
class QWidget *m_centralWidget;
class QSplitter *m_mainSplitter;
class QWidget *m_controlPanel;
class QLabel *m_imageDisplay;
class QLabel *m_imageDisplay; // 兼容旧代码
PointCloudWidget *m_pointCloudWidget;
// 三个图像显示控件
QLabel *m_leftImageDisplay;
QLabel *m_rightImageDisplay;
QLabel *m_rgbImageDisplay;
// 按钮控件
QPushButton *m_refreshBtn;
QPushButton *m_connectBtn;
QPushButton *m_startBtn;
QPushButton *m_stopBtn;
QPushButton *m_onceBtn;
QPushButton *m_captureBtn;
// 数据传输开关按钮
QPushButton *m_leftIRToggle;
QPushButton *m_rightIRToggle;
QPushButton *m_rgbToggle;
// 单目/双目模式切换按钮
QPushButton *m_monocularBtn;
QPushButton *m_binocularBtn;
// 输入控件
QSlider *m_exposureSlider;
QSpinBox *m_exposureSpinBox;
@@ -146,6 +167,11 @@ private:
QLabel *m_resolutionLabel;
QLabel *m_queueLabel;
// 三个相机的FPS标签
QLabel *m_leftFpsLabel;
QLabel *m_rightFpsLabel;
QLabel *m_rgbFpsLabel;
// 日志显示控件
class QTextEdit *m_logDisplay;
QPushButton *m_clearLogBtn;
@@ -162,6 +188,33 @@ private:
int m_pointCloudFrameCount;
int m_totalPointCloudFrameCount;
double m_currentPointCloudFps;
// 统计数据 - 左红外
QDateTime m_lastLeftFrameTime;
int m_leftFrameCount;
int m_totalLeftFrameCount;
double m_currentLeftFps;
// 统计数据 - 右红外
QDateTime m_lastRightFrameTime;
int m_rightFrameCount;
int m_totalRightFrameCount;
double m_currentRightFps;
// 统计数据 - RGB
QDateTime m_lastRgbFrameTime;
int m_rgbFrameCount;
int m_totalRgbFrameCount;
double m_currentRgbFps;
// RGB解码处理标志防止线程积压
QAtomicInt m_rgbProcessing;
int m_rgbSkipCounter; // RGB帧跳过计数器
// 相机启用状态标志(防止关闭后闪烁)
QAtomicInt m_leftIREnabled;
QAtomicInt m_rightIREnabled;
QAtomicInt m_rgbEnabled;
};
#endif // MAINWINDOW_H

View File

@@ -9,9 +9,11 @@ PointCloudGLWidget::PointCloudGLWidget(QWidget *parent)
, m_vertexBuffer(nullptr)
, m_vao(nullptr)
, m_pointCount(0)
, m_fixedCenter(0.0f, 0.0f, 0.0f)
, m_centerInitialized(false)
, m_orthoSize(2000.0f) // 正交投影视野大小
, m_rotationX(0.0f) // 从正面看0度
, m_rotationY(0.0f) // 从正面看0度
, m_rotationY(0.0f) // 不旋转Y轴
, m_translation(0.0f, 0.0f, 0.0f)
, m_leftButtonPressed(false)
, m_rightButtonPressed(false)
@@ -67,7 +69,7 @@ void PointCloudGLWidget::setupShaders()
uniform mat4 mvp;
void main() {
gl_Position = mvp * vec4(position, 1.0);
gl_PointSize = 2.0;
gl_PointSize = 1.0; // 减小点的大小
}
)";
@@ -221,6 +223,27 @@ void PointCloudGLWidget::updatePointCloud(pcl::PointCloud<pcl::PointXYZ>::Ptr cl
m_pointCount = m_vertices.size() / 3;
// 计算点云中心并进行中心化处理
if (m_pointCount > 0) {
float centerX = (minX + maxX) / 2.0f;
float centerY = (minY + maxY) / 2.0f;
float centerZ = (minZ + maxZ) / 2.0f;
// 第一帧:初始化固定中心点
if (!m_centerInitialized) {
m_fixedCenter = QVector3D(centerX, centerY, centerZ);
m_centerInitialized = true;
qDebug() << "[PointCloudGLWidget] Fixed center initialized:" << m_fixedCenter;
}
// 使用固定的中心点进行中心化(避免抖动)
for (size_t i = 0; i < m_vertices.size(); i += 3) {
m_vertices[i] -= m_fixedCenter.x(); // X坐标
m_vertices[i + 1] -= m_fixedCenter.y(); // Y坐标
m_vertices[i + 2] -= m_fixedCenter.z(); // Z坐标
}
}
// 添加调试日志
static int updateCount = 0;
if (updateCount < 3 || updateCount % 100 == 0) {