feat: 添加点云着色按钮 适配调整后的下位机图像采集方式

This commit is contained in:
2026-02-02 14:08:30 +08:00
parent de8ce7c4a6
commit b1871aa9e7
11 changed files with 298 additions and 125 deletions

View File

@@ -22,11 +22,15 @@
#define PIXEL_FORMAT_MONO16 0x01100005 // Mono16 format (legacy)
#define PIXEL_FORMAT_MONO16_LEFT 0x01100006 // Mono16 format for left IR camera
#define PIXEL_FORMAT_MONO16_RIGHT 0x01100007 // Mono16 format for right IR camera
#define PIXEL_FORMAT_MONO8_LEFT 0x01080006 // Mono8 format for left IR camera (downsampled)
#define PIXEL_FORMAT_MONO8_RIGHT 0x01080007 // Mono8 format for right IR camera (downsampled)
#define PIXEL_FORMAT_MJPEG 0x02180001 // MJPEG format for RGB camera
// Image dimensions
#define IMAGE_WIDTH 1224
#define IMAGE_HEIGHT 1024
#define IR_DISPLAY_WIDTH 612 // Downsampled IR display width
#define IR_DISPLAY_HEIGHT 512 // Downsampled IR display height
#define RGB_WIDTH 1920
#define RGB_HEIGHT 1080

View File

@@ -23,6 +23,8 @@ public:
~PointCloudGLWidget();
void updatePointCloud(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud);
void setColorMode(bool enabled) { m_colorMode = enabled ? 1 : 0; update(); }
bool colorMode() const { return m_colorMode != 0; }
protected:
void initializeGL() override;
@@ -48,25 +50,31 @@ private:
// 点云数据
std::vector<float> m_vertices;
int m_pointCount;
// 固定的点云中心点(避免抖动)
QVector3D m_fixedCenter;
bool m_centerInitialized;
float m_minZ, m_maxZ; // 深度范围(用于着色)
// 相机参数
QMatrix4x4 m_projection;
QMatrix4x4 m_view;
QMatrix4x4 m_model;
float m_orthoSize; // 正交投影视野大小(控制缩放)
float m_fov; // 透视投影视场角
float m_rotationX; // X轴旋转角度
float m_rotationY; // Y轴旋转角度
QVector3D m_translation; // 平移
QVector3D m_cloudCenter; // 点云中心
float m_viewDistance; // 观察距离
QVector3D m_panOffset; // 用户平移偏移
float m_zoom; // 缩放因子
// 鼠标交互状态
QPoint m_lastMousePos;
bool m_leftButtonPressed;
bool m_rightButtonPressed;
// 首帧标志(只在首帧时自动居中)
bool m_firstFrame;
// 颜色模式(0=黑白,1=彩色)
int m_colorMode;
};
#endif // POINTCLOUDGLWIDGET_H

View File

@@ -19,6 +19,10 @@ public:
// 更新点云显示
void updatePointCloud(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud);
// 颜色模式控制
void setColorMode(bool enabled);
bool colorMode() const;
private:
QLabel *m_statusLabel;
PointCloudGLWidget *m_glWidget;

View File

@@ -45,7 +45,7 @@ void ConfigManager::setDataPort(int port)
// ========== 相机配置 ==========
int ConfigManager::getExposureTime() const
{
return m_settings->value("Camera/ExposureTime", 10000).toInt();
return m_settings->value("Camera/ExposureTime", 1000).toInt();
}
void ConfigManager::setExposureTime(int exposure)

View File

@@ -94,7 +94,7 @@ void DeviceScanner::onReadyRead()
if (response.contains("D330M_CAMERA")) {
DeviceInfo device;
device.ipAddress = senderIp;
device.deviceName = "D330M Camera";
device.deviceName = "Camera";
device.port = SCAN_PORT;
device.responseTime = 0;

View File

@@ -96,9 +96,9 @@ void GVSPParser::handleLeaderPacket(const uint8_t *data, size_t size)
// 根据像素格式选择对应的状态
StreamState *state = nullptr;
if (pixelFormat == PIXEL_FORMAT_MONO16_LEFT) {
if (pixelFormat == PIXEL_FORMAT_MONO16_LEFT || pixelFormat == PIXEL_FORMAT_MONO8_LEFT) {
state = &m_leftIRState;
} else if (pixelFormat == PIXEL_FORMAT_MONO16_RIGHT) {
} else if (pixelFormat == PIXEL_FORMAT_MONO16_RIGHT || pixelFormat == PIXEL_FORMAT_MONO8_RIGHT) {
state = &m_rightIRState;
} else if (pixelFormat == PIXEL_FORMAT_MJPEG) {
state = &m_rgbState;
@@ -118,6 +118,9 @@ void GVSPParser::handleLeaderPacket(const uint8_t *data, size_t size)
if (pixelFormat == PIXEL_FORMAT_MJPEG) {
// MJPEG是压缩格式,实际大小未知,设置为0表示动态接收
state->expectedSize = 0;
} else if (pixelFormat == PIXEL_FORMAT_MONO8_LEFT || pixelFormat == PIXEL_FORMAT_MONO8_RIGHT) {
// 8-bit灰度格式(下采样)
state->expectedSize = imageWidth * imageHeight;
} else {
// 16-bit或12-bit灰度等固定格式
state->expectedSize = imageWidth * imageHeight * 2;
@@ -268,6 +271,29 @@ void GVSPParser::processImageData(GVSPParser::StreamState *state)
return;
}
// 处理Mono8格式左右红外相机下采样8位数据
if (state->pixelFormat == PIXEL_FORMAT_MONO8_LEFT) {
// 检查数据大小
if (state->dataBuffer.size() < state->expectedSize) {
return;
}
// 左红外8位数据
emit leftImageReceived(state->dataBuffer, state->blockId);
m_imageSequence++;
return;
}
if (state->pixelFormat == PIXEL_FORMAT_MONO8_RIGHT) {
// 检查数据大小
if (state->dataBuffer.size() < state->expectedSize) {
return;
}
// 右红外8位数据
emit rightImageReceived(state->dataBuffer, state->blockId);
m_imageSequence++;
return;
}
// 兼容旧版本使用序号区分legacy
if (state->pixelFormat == PIXEL_FORMAT_MONO16) {
// 检查数据大小

View File

@@ -134,23 +134,8 @@ bool NetworkManager::sendStopCommand()
bool NetworkManager::sendExposureCommand(int exposureTime)
{
// 同时发送结构光曝光命令UART控制激光器单位μs
QString exposureCommand = QString("EXPOSURE:%1").arg(exposureTime);
bool success1 = sendCommand(exposureCommand);
// 同时发送红外相机曝光命令通过触发脉冲宽度控制单位μs
// 下位机会将此值用作manual_trigger_pulse()的脉冲宽度参数
// 脉冲宽度直接决定相机的实际曝光时间
int irExposure = exposureTime;
// 限制在有效范围内1000μs ~ 100000μs避免脉冲太短导致相机无法触发
if (irExposure < 1000) irExposure = 1000;
if (irExposure > 100000) irExposure = 100000;
QString irExposureCommand = QString("IR_EXPOSURE:%1").arg(irExposure);
bool success2 = sendCommand(irExposureCommand);
return success1 && success2;
return sendCommand(exposureCommand);
}
// ========== 传输开关命令 ==========

View File

@@ -66,9 +66,11 @@ MainWindow::MainWindow(QWidget *parent)
, m_rgbSkipCounter(0)
{
m_rgbProcessing.storeRelaxed(0); // 初始化RGB处理标志
m_leftIREnabled.storeRelaxed(1); // 初始化左红外启用标志(默认启用)
m_rightIREnabled.storeRelaxed(1); // 初始化右红外启用标志(默认启用)
m_rgbEnabled.storeRelaxed(1); // 初始化RGB启用标志(默认启用)
m_leftIRProcessing.storeRelaxed(0); // 初始化左红外处理标志
m_rightIRProcessing.storeRelaxed(0); // 初始化右红外处理标志
m_leftIREnabled.storeRelaxed(0); // 初始化左红外启用标志(默认禁用)
m_rightIREnabled.storeRelaxed(0); // 初始化右红外启用标志(默认禁用)
m_rgbEnabled.storeRelaxed(0); // 初始化RGB启用标志(默认禁用)
setupUI();
setupConnections();
loadSettings();
@@ -105,7 +107,7 @@ MainWindow::~MainWindow()
void MainWindow::setupUI()
{
setWindowTitle("D330Viewer - D330M 相机控制");
setWindowTitle("河北省科学院应用数学研究所");
resize(1280, 720); // 16:9 比例
// 设置应用程序图标
@@ -187,9 +189,9 @@ void MainWindow::setupUI()
m_rightIRToggle->setCheckable(true);
m_rgbToggle->setCheckable(true);
m_leftIRToggle->setChecked(true);
m_rightIRToggle->setChecked(true);
m_rgbToggle->setChecked(true);
m_leftIRToggle->setChecked(false);
m_rightIRToggle->setChecked(false);
m_rgbToggle->setChecked(false);
m_leftIRToggle->setFixedHeight(32);
m_rightIRToggle->setFixedHeight(32);
@@ -219,6 +221,17 @@ void MainWindow::setupUI()
toolBarLayout->addWidget(m_rightIRToggle);
toolBarLayout->addWidget(m_rgbToggle);
toolBarLayout->addSpacing(10);
// 点云颜色开关按钮
m_pointCloudColorToggle = new QPushButton("点云着色", topToolBar);
m_pointCloudColorToggle->setCheckable(true);
m_pointCloudColorToggle->setChecked(false);
m_pointCloudColorToggle->setFixedHeight(32);
m_pointCloudColorToggle->setToolTip("开启/关闭点云深度着色");
m_pointCloudColorToggle->setStyleSheet(toggleStyle);
toolBarLayout->addWidget(m_pointCloudColorToggle);
toolBarLayout->addSpacing(20);
// 单目/双目模式切换按钮
@@ -363,12 +376,12 @@ void MainWindow::setupUI()
QHBoxLayout *exposureLayout = new QHBoxLayout();
m_exposureSlider = new QSlider(Qt::Horizontal, exposureGroup);
m_exposureSlider->setRange(1000, 100000);
m_exposureSlider->setValue(10000);
m_exposureSlider->setRange(100, 100000);
m_exposureSlider->setValue(1000);
m_exposureSpinBox = new QSpinBox(exposureGroup);
m_exposureSpinBox->setRange(1000, 100000);
m_exposureSpinBox->setValue(10000);
m_exposureSpinBox->setRange(100, 100000);
m_exposureSpinBox->setValue(1000);
m_exposureSpinBox->setMinimumWidth(80);
exposureLayout->addWidget(m_exposureSlider, 3);
@@ -704,6 +717,14 @@ void MainWindow::setupConnections()
}
});
// 点云颜色开关连接
connect(m_pointCloudColorToggle, &QPushButton::toggled, this, [this](bool checked) {
if (m_pointCloudWidget) {
m_pointCloudWidget->setColorMode(checked);
qDebug() << "点云着色:" << (checked ? "开启" : "关闭");
}
});
// 单目/双目模式切换按钮连接
connect(m_monocularBtn, &QPushButton::clicked, this, [this]() {
m_monocularBtn->setChecked(true);
@@ -1002,12 +1023,34 @@ void MainWindow::onLeftImageReceived(const QByteArray &jpegData, uint32_t blockI
m_lastLeftFrameTime = currentTime;
}
// 使用后台线程处理16位原始数据避免阻塞UI
// 使用后台线程处理红外数据避免阻塞UI
if (m_leftImageDisplay && jpegData.size() > 0) {
// 检查数据大小是否为16位图像
size_t expectedSize = 1224 * 1024 * sizeof(uint16_t);
if (jpegData.size() == expectedSize) {
// 复制数据到局部变量
// 检查数据大小:8位下采样(612x512)或16位原始(1224x1024)
size_t size8bit = 612 * 512;
size_t size16bit = 1224 * 1024 * sizeof(uint16_t);
if (jpegData.size() == size8bit) {
// 8位下采样格式直接显示
QByteArray dataCopy = jpegData;
QtConcurrent::run([this, dataCopy]() {
try {
QImage image(reinterpret_cast<const uchar*>(dataCopy.constData()),
612, 512, 612, QImage::Format_Grayscale8);
QImage imageCopy = image.copy();
QMetaObject::invokeMethod(this, [this, imageCopy]() {
if (m_leftImageDisplay) {
QPixmap pixmap = QPixmap::fromImage(imageCopy);
m_leftImageDisplay->setPixmap(pixmap.scaled(
m_leftImageDisplay->size(), Qt::KeepAspectRatio, Qt::FastTransformation));
}
}, Qt::QueuedConnection);
} catch (const std::exception &e) {
qDebug() << "[MainWindow] ERROR: Left IR 8bit processing exception:" << e.what();
}
});
} else if (jpegData.size() == size16bit) {
// 16位原始格式需要归一化处理
QByteArray dataCopy = jpegData;
// 在后台线程处理
@@ -1095,7 +1138,8 @@ void MainWindow::onLeftImageReceived(const QByteArray &jpegData, uint32_t blockI
}
});
} else {
qDebug() << "[MainWindow] ERROR: Left IR data size mismatch:" << jpegData.size();
qDebug() << "[MainWindow] ERROR: Left IR data size mismatch:" << jpegData.size()
<< "(expected 8bit:" << size8bit << "or 16bit:" << size16bit << ")";
}
}
}
@@ -1126,12 +1170,34 @@ void MainWindow::onRightImageReceived(const QByteArray &jpegData, uint32_t block
m_lastRightFrameTime = currentTime;
}
// 使用后台线程处理16位原始数据避免阻塞UI
// 使用后台线程处理红外数据避免阻塞UI
if (m_rightImageDisplay && jpegData.size() > 0) {
// 检查数据大小是否为16位图像
size_t expectedSize = 1224 * 1024 * sizeof(uint16_t);
if (jpegData.size() == expectedSize) {
// 复制数据到局部变量
// 检查数据大小:8位下采样(612x512)或16位原始(1224x1024)
size_t size8bit = 612 * 512;
size_t size16bit = 1224 * 1024 * sizeof(uint16_t);
if (jpegData.size() == size8bit) {
// 8位下采样格式直接显示
QByteArray dataCopy = jpegData;
QtConcurrent::run([this, dataCopy]() {
try {
QImage image(reinterpret_cast<const uchar*>(dataCopy.constData()),
612, 512, 612, QImage::Format_Grayscale8);
QImage imageCopy = image.copy();
QMetaObject::invokeMethod(this, [this, imageCopy]() {
if (m_rightImageDisplay) {
QPixmap pixmap = QPixmap::fromImage(imageCopy);
m_rightImageDisplay->setPixmap(pixmap.scaled(
m_rightImageDisplay->size(), Qt::KeepAspectRatio, Qt::FastTransformation));
}
}, Qt::QueuedConnection);
} catch (const std::exception &e) {
qDebug() << "[MainWindow] ERROR: Right IR 8bit processing exception:" << e.what();
}
});
} else if (jpegData.size() == size16bit) {
// 16位原始格式需要归一化处理
QByteArray dataCopy = jpegData;
// 在后台线程处理
@@ -1219,7 +1285,8 @@ void MainWindow::onRightImageReceived(const QByteArray &jpegData, uint32_t block
}
});
} else {
qDebug() << "[MainWindow] ERROR: Right IR data size mismatch:" << jpegData.size();
qDebug() << "[MainWindow] ERROR: Right IR data size mismatch:" << jpegData.size()
<< "(expected 8bit:" << size8bit << "or 16bit:" << size16bit << ")";
}
}
}
@@ -1529,74 +1596,100 @@ void MainWindow::performBackgroundSave(const QString &saveDir, const QString &ba
bool rightIRSuccess = false;
bool rgbSuccess = false;
// 保存左红外图像16位原始数据,1224×1024
// 保存左红外图像(支持16位原始1224×1024或8位下采样612×512
if (!leftIRData.isEmpty()) {
try {
size_t expectedSize = 1224 * 1024 * sizeof(uint16_t);
if (leftIRData.size() == expectedSize) {
size_t size16bit = 1224 * 1024 * sizeof(uint16_t);
size_t size8bit = 612 * 512;
if (leftIRData.size() == size16bit) {
const uint16_t* src = reinterpret_cast<const uint16_t*>(leftIRData.constData());
// 创建16位灰度图像
cv::Mat leftIR16(1024, 1224, CV_16UC1);
memcpy(leftIR16.data, src, expectedSize);
memcpy(leftIR16.data, src, size16bit);
// 根据depthFormat参数保存
if (depthFormat == "png" || depthFormat == "both") {
// 保存PNG格式8位
QString pngPath = QString("%1/%2_left_ir.png").arg(saveDir).arg(baseName);
cv::Mat leftIR8;
leftIR16.convertTo(leftIR8, CV_8UC1, 255.0 / 65535.0);
cv::imwrite(pngPath.toStdString(), leftIR8);
qDebug() << "保存左红外PNG图像:" << pngPath;
qDebug() << "保存左红外PNG图像(16bit):" << pngPath;
leftIRSuccess = true;
}
if (depthFormat == "tiff" || depthFormat == "both") {
// 保存TIFF格式保留16位精度
QString tiffPath = QString("%1/%2_left_ir.tiff").arg(saveDir).arg(baseName);
cv::imwrite(tiffPath.toStdString(), leftIR16);
qDebug() << "保存左红外TIFF图像(16位):" << tiffPath;
leftIRSuccess = true;
}
} else if (leftIRData.size() == size8bit) {
cv::Mat leftIR8(512, 612, CV_8UC1, const_cast<char*>(leftIRData.constData()));
cv::Mat leftIR8Clone = leftIR8.clone();
if (depthFormat == "png" || depthFormat == "both") {
QString pngPath = QString("%1/%2_left_ir.png").arg(saveDir).arg(baseName);
cv::imwrite(pngPath.toStdString(), leftIR8Clone);
qDebug() << "保存左红外PNG图像(8bit下采样):" << pngPath;
leftIRSuccess = true;
}
if (depthFormat == "tiff" || depthFormat == "both") {
QString tiffPath = QString("%1/%2_left_ir.tiff").arg(saveDir).arg(baseName);
cv::imwrite(tiffPath.toStdString(), leftIR8Clone);
qDebug() << "保存左红外TIFF图像(8bit下采样):" << tiffPath;
leftIRSuccess = true;
}
} else {
qDebug() << "左红外数据大小不匹配:" << leftIRData.size();
qDebug() << "左红外数据大小不匹配:" << leftIRData.size()
<< "(期望16bit:" << size16bit << "或8bit:" << size8bit << ")";
}
} catch (const std::exception &e) {
qDebug() << "保存左红外图像失败:" << e.what();
}
}
// 保存右红外图像16位原始数据,1224×1024
// 保存右红外图像(支持16位原始1224×1024或8位下采样612×512
if (!rightIRData.isEmpty()) {
try {
size_t expectedSize = 1224 * 1024 * sizeof(uint16_t);
if (rightIRData.size() == expectedSize) {
size_t size16bit = 1224 * 1024 * sizeof(uint16_t);
size_t size8bit = 612 * 512;
if (rightIRData.size() == size16bit) {
const uint16_t* src = reinterpret_cast<const uint16_t*>(rightIRData.constData());
// 创建16位灰度图像
cv::Mat rightIR16(1024, 1224, CV_16UC1);
memcpy(rightIR16.data, src, expectedSize);
memcpy(rightIR16.data, src, size16bit);
// 根据depthFormat参数保存
if (depthFormat == "png" || depthFormat == "both") {
// 保存PNG格式8位
QString pngPath = QString("%1/%2_right_ir.png").arg(saveDir).arg(baseName);
cv::Mat rightIR8;
rightIR16.convertTo(rightIR8, CV_8UC1, 255.0 / 65535.0);
cv::imwrite(pngPath.toStdString(), rightIR8);
qDebug() << "保存右红外PNG图像:" << pngPath;
qDebug() << "保存右红外PNG图像(16bit):" << pngPath;
rightIRSuccess = true;
}
if (depthFormat == "tiff" || depthFormat == "both") {
// 保存TIFF格式保留16位精度
QString tiffPath = QString("%1/%2_right_ir.tiff").arg(saveDir).arg(baseName);
cv::imwrite(tiffPath.toStdString(), rightIR16);
qDebug() << "保存右红外TIFF图像(16位):" << tiffPath;
rightIRSuccess = true;
}
} else if (rightIRData.size() == size8bit) {
cv::Mat rightIR8(512, 612, CV_8UC1, const_cast<char*>(rightIRData.constData()));
cv::Mat rightIR8Clone = rightIR8.clone();
if (depthFormat == "png" || depthFormat == "both") {
QString pngPath = QString("%1/%2_right_ir.png").arg(saveDir).arg(baseName);
cv::imwrite(pngPath.toStdString(), rightIR8Clone);
qDebug() << "保存右红外PNG图像(8bit下采样):" << pngPath;
rightIRSuccess = true;
}
if (depthFormat == "tiff" || depthFormat == "both") {
QString tiffPath = QString("%1/%2_right_ir.tiff").arg(saveDir).arg(baseName);
cv::imwrite(tiffPath.toStdString(), rightIR8Clone);
qDebug() << "保存右红外TIFF图像(8bit下采样):" << tiffPath;
rightIRSuccess = true;
}
} else {
qDebug() << "右红外数据大小不匹配:" << rightIRData.size();
qDebug() << "右红外数据大小不匹配:" << rightIRData.size()
<< "(期望16bit:" << size16bit << "或8bit:" << size8bit << ")";
}
} catch (const std::exception &e) {
qDebug() << "保存右红外图像失败:" << e.what();

View File

@@ -140,6 +140,7 @@ private:
QPushButton *m_leftIRToggle;
QPushButton *m_rightIRToggle;
QPushButton *m_rgbToggle;
QPushButton *m_pointCloudColorToggle; // 点云颜色开关
// 单目/双目模式切换按钮
QPushButton *m_monocularBtn;
@@ -207,8 +208,10 @@ private:
int m_totalRgbFrameCount;
double m_currentRgbFps;
// RGB解码处理标志(防止线程积压)
// 解码处理标志(防止线程积压导致闪烁)
QAtomicInt m_rgbProcessing;
QAtomicInt m_leftIRProcessing;
QAtomicInt m_rightIRProcessing;
int m_rgbSkipCounter; // RGB帧跳过计数器
// 相机启用状态标志(防止关闭后闪烁)

View File

@@ -9,14 +9,19 @@ PointCloudGLWidget::PointCloudGLWidget(QWidget *parent)
, m_vertexBuffer(nullptr)
, m_vao(nullptr)
, m_pointCount(0)
, m_fixedCenter(0.0f, 0.0f, 0.0f)
, m_centerInitialized(false)
, m_orthoSize(2000.0f) // 正交投影视野大小
, m_minZ(0.0f)
, m_maxZ(1000.0f)
, m_fov(60.0f) // 透视投影视场角
, m_rotationX(0.0f) // 从正面看0度
, m_rotationY(0.0f) // 不旋转Y轴
, m_translation(0.0f, 0.0f, 0.0f)
, m_cloudCenter(0.0f, 0.0f, 0.0f)
, m_viewDistance(1000.0f)
, m_panOffset(0.0f, 0.0f, 0.0f)
, m_zoom(1.0f) // 缩放因子
, m_leftButtonPressed(false)
, m_rightButtonPressed(false)
, m_firstFrame(true)
, m_colorMode(0) // 默认黑白模式
{
setMinimumSize(400, 400);
}
@@ -42,7 +47,7 @@ void PointCloudGLWidget::initializeGL()
{
initializeOpenGLFunctions();
glClearColor(0.0f, 0.0f, 0.0f, 1.0f);
glClearColor(0.1f, 0.1f, 0.15f, 1.0f); // 深灰色背景
glEnable(GL_DEPTH_TEST);
glEnable(GL_PROGRAM_POINT_SIZE);
@@ -67,18 +72,39 @@ void PointCloudGLWidget::setupShaders()
#version 330 core
layout(location = 0) in vec3 position;
uniform mat4 mvp;
uniform float minZ;
uniform float maxZ;
out float depth;
void main() {
gl_Position = mvp * vec4(position, 1.0);
gl_PointSize = 1.0; // 减小点的大小
gl_PointSize = 1.0;
depth = (position.z - minZ) / (maxZ - minZ);
}
)";
// 片段着色器
// 片段着色器 - 支持黑白和彩色两种模式
const char *fragmentShaderSource = R"(
#version 330 core
in float depth;
uniform int colorMode;
out vec4 fragColor;
void main() {
fragColor = vec4(1.0, 1.0, 1.0, 1.0);
float d = clamp(depth, 0.0, 1.0);
if (colorMode == 0) {
fragColor = vec4(1.0, 1.0, 1.0, 1.0);
} else {
vec3 color;
if (d < 0.25) {
color = mix(vec3(0.0, 0.0, 1.0), vec3(0.0, 1.0, 1.0), d * 4.0);
} else if (d < 0.5) {
color = mix(vec3(0.0, 1.0, 1.0), vec3(0.0, 1.0, 0.0), (d - 0.25) * 4.0);
} else if (d < 0.75) {
color = mix(vec3(0.0, 1.0, 0.0), vec3(1.0, 1.0, 0.0), (d - 0.5) * 4.0);
} else {
color = mix(vec3(1.0, 1.0, 0.0), vec3(1.0, 0.0, 0.0), (d - 0.75) * 4.0);
}
fragColor = vec4(color, 1.0);
}
}
)";
@@ -99,11 +125,9 @@ void PointCloudGLWidget::resizeGL(int w, int h)
{
m_projection.setToIdentity();
// 使用正交投影代替透视投影,避免"喷射状"效果
// 使用透视投影从相机原点看向Z轴正方向
float aspect = float(w) / float(h);
m_projection.ortho(-m_orthoSize * aspect, m_orthoSize * aspect,
-m_orthoSize, m_orthoSize,
-50000.0f, 50000.0f); // 近平面和远平面
m_projection.perspective(m_fov, aspect, 1.0f, 50000.0f);
}
void PointCloudGLWidget::paintGL()
@@ -114,18 +138,22 @@ void PointCloudGLWidget::paintGL()
return;
}
// 每帧重新计算正交投影矩阵确保使用最新的m_orthoSize
// 重新计算透视投影矩阵
m_projection.setToIdentity();
float aspect = float(width()) / float(height());
m_projection.ortho(-m_orthoSize * aspect, m_orthoSize * aspect,
-m_orthoSize, m_orthoSize,
-50000.0f, 50000.0f);
m_projection.perspective(m_fov / m_zoom, aspect, 1.0f, 50000.0f);
// 设置view矩阵
// 设置view矩阵 - 轨道相机模式(围绕点云中心旋转)
m_view.setToIdentity();
// 1. 用户平移偏移
m_view.translate(m_panOffset);
// 2. 相机后退到观察距离
m_view.translate(0.0f, 0.0f, -m_viewDistance);
// 3. 应用旋转(围绕原点,即点云中心)
m_view.rotate(m_rotationX, 1.0f, 0.0f, 0.0f);
m_view.rotate(m_rotationY, 0.0f, 1.0f, 0.0f);
m_view.translate(m_translation);
// 4. 将点云中心移到原点
m_view.translate(-m_cloudCenter);
// 设置model矩阵
m_model.setToIdentity();
@@ -136,6 +164,9 @@ void PointCloudGLWidget::paintGL()
// 绑定shader和设置uniform
m_program->bind();
m_program->setUniformValue("mvp", mvp);
m_program->setUniformValue("minZ", m_minZ);
m_program->setUniformValue("maxZ", m_maxZ);
m_program->setUniformValue("colorMode", m_colorMode);
// 绑定VAO和绘制
m_vao->bind();
@@ -166,10 +197,10 @@ void PointCloudGLWidget::mouseMoveEvent(QMouseEvent *event)
m_rotationY += delta.x() * 0.5f;
update();
} else if (m_rightButtonPressed) {
// 右键:平移(根据正交投影视野大小调整平移速度)
float scale = m_orthoSize * 0.002f;
m_translation.setX(m_translation.x() + delta.x() * scale);
m_translation.setY(m_translation.y() - delta.y() * scale);
// 右键:平移(根据观察距离调整平移速度)
float scale = m_viewDistance * 0.002f;
m_panOffset.setX(m_panOffset.x() + delta.x() * scale);
m_panOffset.setY(m_panOffset.y() - delta.y() * scale);
update();
}
}
@@ -185,12 +216,12 @@ void PointCloudGLWidget::mouseReleaseEvent(QMouseEvent *event)
void PointCloudGLWidget::wheelEvent(QWheelEvent *event)
{
// 滚轮:缩放(调整正交投影视野大小)
// 滚轮:缩放(调整zoom因子
float delta = event->angleDelta().y() / 120.0f;
m_orthoSize *= (1.0f - delta * 0.1f);
m_orthoSize = qMax(100.0f, qMin(m_orthoSize, 10000.0f)); // 范围:100-10000
m_zoom *= (1.0f + delta * 0.1f);
m_zoom = qMax(0.1f, qMin(m_zoom, 10.0f)); // 范围:0.1-10倍
update(); // 触发重绘paintGL会使用新的m_orthoSize
update();
}
void PointCloudGLWidget::updatePointCloud(pcl::PointCloud<pcl::PointXYZ>::Ptr cloud)
@@ -199,8 +230,9 @@ void PointCloudGLWidget::updatePointCloud(pcl::PointCloud<pcl::PointXYZ>::Ptr cl
return;
}
// 过滤全零点并转换为顶点数组
// 过滤全零点并转换为顶点数组,同时计算包围盒
m_vertices.clear();
float minX = FLT_MAX, maxX = -FLT_MAX;
float minY = FLT_MAX, maxY = -FLT_MAX;
float minZ = FLT_MAX, maxZ = -FLT_MAX;
@@ -211,7 +243,7 @@ void PointCloudGLWidget::updatePointCloud(pcl::PointCloud<pcl::PointXYZ>::Ptr cl
m_vertices.push_back(point.y);
m_vertices.push_back(point.z);
// 统计坐标范围
// 更新包围盒
if (point.x < minX) minX = point.x;
if (point.x > maxX) maxX = point.x;
if (point.y < minY) minY = point.y;
@@ -223,37 +255,43 @@ void PointCloudGLWidget::updatePointCloud(pcl::PointCloud<pcl::PointXYZ>::Ptr cl
m_pointCount = m_vertices.size() / 3;
// 计算点云中心并进行中心化处理
// 保存深度范围用于着色
if (m_pointCount > 0) {
m_minZ = minZ;
m_maxZ = maxZ;
}
// 只在首帧时自动调整相机位置,之后保持用户交互状态
if (m_pointCount > 0 && m_firstFrame) {
// 计算点云中心
float centerX = (minX + maxX) / 2.0f;
float centerY = (minY + maxY) / 2.0f;
float centerZ = (minZ + maxZ) / 2.0f;
// 第一帧:初始化固定中心点
if (!m_centerInitialized) {
m_fixedCenter = QVector3D(centerX, centerY, centerZ);
m_centerInitialized = true;
qDebug() << "[PointCloudGLWidget] Fixed center initialized:" << m_fixedCenter;
}
// 计算点云尺寸
float depthRange = maxZ - minZ;
float sizeX = maxX - minX;
float sizeY = maxY - minY;
float maxSize = std::max({sizeX, sizeY, depthRange});
// 使用固定的中心点进行中心化(避免抖动)
for (size_t i = 0; i < m_vertices.size(); i += 3) {
m_vertices[i] -= m_fixedCenter.x(); // X坐标
m_vertices[i + 1] -= m_fixedCenter.y(); // Y坐标
m_vertices[i + 2] -= m_fixedCenter.z(); // Z坐标
}
}
// 设置点云中心
m_cloudCenter = QVector3D(centerX, centerY, centerZ);
// 添加调试日志
static int updateCount = 0;
if (updateCount < 3 || updateCount % 100 == 0) {
// qDebug() << "[PointCloudGLWidget] Update" << updateCount << "- Points:" << m_pointCount
// << "Total cloud size:" << cloud->size();
// qDebug() << " X range:" << minX << "to" << maxX;
// qDebug() << " Y range:" << minY << "to" << maxY;
// qDebug() << " Z range:" << minZ << "to" << maxZ;
// 计算观察距离,让相机从外部观察点云
m_viewDistance = maxSize * 1.5f;
// 重置平移偏移和旋转角度
m_panOffset = QVector3D(0.0f, 0.0f, 0.0f);
m_rotationX = 0.0f;
m_rotationY = 0.0f;
// 设置缩放
m_zoom = 1.0f;
qDebug() << "[PointCloudGLWidget] 首帧自动居中 - 点云中心:" << centerX << centerY << centerZ
<< "观察距离:" << m_viewDistance;
m_firstFrame = false; // 标记首帧已处理
}
updateCount++;
updateBuffers();
update();

View File

@@ -54,3 +54,15 @@ void PointCloudWidget::updatePointCloud(pcl::PointCloud<pcl::PointXYZ>::Ptr clou
// 更新OpenGL显示
m_glWidget->updatePointCloud(cloud);
}
// Forward the depth-coloring toggle to the embedded OpenGL widget.
// A null m_glWidget (widget not yet constructed) makes this a no-op.
void PointCloudWidget::setColorMode(bool enabled)
{
    if (m_glWidget == nullptr) {
        return;
    }
    m_glWidget->setColorMode(enabled);
}
// Report whether depth-based point coloring is currently enabled.
// Returns false when no OpenGL widget is attached.
bool PointCloudWidget::colorMode() const
{
    if (m_glWidget != nullptr) {
        return m_glWidget->colorMode();
    }
    return false;
}