亚门钢太郎 2025-10-28 19:13 · acceptance rate: 0%
14 views

OSG render-to-texture, but the depth values come out wrong

Symptoms and background

I'm new to OSG, using osg 3.4 and osgEarth 2.7.
I need a post-processing pass that renders a fog effect, but once it is rendered, the fog flickers whenever I move the camera.
By debugging in the shader I narrowed the problem down to the depth values: if I output the depth value directly as the color, the whole screen also flickers while the camera moves.
My approach is: an RTT pass first renders the scene into color and depth textures, then a HUD pass reconstructs distance from the depth texture and the matrices and applies the fog accordingly.
Also, when the fog comes out wrong in a given view, calling close() and then open() again makes it correct, but as soon as I move the camera the fog goes wrong again.
Could anyone explain why?
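For context, here is a minimal, self-contained sketch (my own illustration, not part of the original post) of one thing that is often checked when depth-based reconstruction flickers with camera motion in OSG/osgEarth: the near/far planes are recomputed per frame during culling, so a projection matrix captured elsewhere may not be the one the depth buffer was actually rendered with. The function name and parameter values below are placeholders.

#include <osg/Camera>
#include <osg/CullSettings>

// Lock the RTT camera's projection so the matrix sent to the fog shader is guaranteed
// to match the one used when the depth buffer was written (values are placeholders).
void lockRttProjection(osg::Camera* rttCamera, double fovy, double aspect,
                       double zNear, double zFar)
{
    // Stop OSG from recomputing near/far from the scene bounds each frame.
    rttCamera->setComputeNearFarMode(osg::CullSettings::DO_NOT_COMPUTE_NEAR_FAR);

    // Use an explicit projection for the RTT pass.
    rttCamera->setProjectionMatrixAsPerspective(fovy, aspect, zNear, zFar);
}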

#include "OEPreciseHorizonBlurEffect.h"
#include <osgEarth/VirtualProgram>
#include <osgEarth/TerrainEngineNode>
#include <osgViewer/Viewer>
#include <osg/Geode>
#include <osg/Geometry>
#include <osg/TextureRectangle>
#include <iostream>

// Static member initialization
osg::ref_ptr<osg::Camera> OEPreciseHorizonBlurEffect::_rttCamera = nullptr;
osg::ref_ptr<osg::TextureRectangle> OEPreciseHorizonBlurEffect::_colorTexture = nullptr;
osg::ref_ptr<osg::TextureRectangle> OEPreciseHorizonBlurEffect::_depthTexture = nullptr;
osg::ref_ptr<osg::Camera> OEPreciseHorizonBlurEffect::_hudCamera = nullptr;
osg::ref_ptr<osg::Uniform> OEPreciseHorizonBlurEffect::_invViewProjMatrixUniform = nullptr;
osg::ref_ptr<osg::Uniform> OEPreciseHorizonBlurEffect::_cameraPosUniform = nullptr;
osg::ref_ptr<osg::Uniform> OEPreciseHorizonBlurEffect::_viewMatrixUniform = nullptr;
osg::ref_ptr<osg::Uniform> OEPreciseHorizonBlurEffect::_globalDensityUniform = nullptr;
osg::ref_ptr<osg::Uniform> OEPreciseHorizonBlurEffect::_fogColorUniform = nullptr;
osg::ref_ptr<osg::Uniform> OEPreciseHorizonBlurEffect::_heightFogBaseAltUniform = nullptr;
osg::ref_ptr<osg::Uniform> OEPreciseHorizonBlurEffect::_heightFogRangeUniform = nullptr;
osg::ref_ptr<osg::Uniform> OEPreciseHorizonBlurEffect::_depthFogStartUniform = nullptr;
osg::ref_ptr<osg::Uniform> OEPreciseHorizonBlurEffect::_depthFogEndUniform = nullptr;
osg::ref_ptr<osg::Uniform> OEPreciseHorizonBlurEffect::_zNearUniform = nullptr;
osg::ref_ptr<osg::Uniform> OEPreciseHorizonBlurEffect::_zFarUniform = nullptr;

float OEPreciseHorizonBlurEffect::_minHeight = -3000.0f;
float OEPreciseHorizonBlurEffect::_maxHeight = 30000.0f;

osg::ref_ptr<osg::Geode> OEPreciseHorizonBlurEffect::_quadGeode;
// Shader written in the style of the reference code
const char GaussianHorizonBlurShader[] = R"(
#version 120
#extension GL_ARB_texture_rectangle : enable
#extension GL_EXT_gpu_shader4 : enable

uniform sampler2DRect colorTex;
uniform sampler2DRect depthTex;
uniform mat4 invViewProjMatrix;
uniform mat4 ViewMatrix;
uniform vec3 cameraPos;
uniform vec3 earthCenterDir;
uniform float globalDensity;
uniform vec4 fogColor;
uniform float heightFogBaseAlt;
uniform float heightFogRange;
uniform float depthFogStart;
uniform float depthFogEnd;
uniform float zNear;
uniform float zFar;


// Reconstruct the world-space position from a depth sample
vec3 reconstructWorldPosition(vec2 texCoord, float depth)
{
    float linearDepth = (2.0 * zNear * zFar) / (zFar + zNear - depth * (zFar - zNear)); // linearized depth (not used below)
    vec2 texSize = vec2(textureSize2DRect(colorTex, 0));
    vec2 texCoordNormalized = texCoord / texSize;
    vec4 ndcPos;
    ndcPos.x = texCoordNormalized.x * 2.0 - 1.0;
    ndcPos.y = (1.0 - texCoordNormalized.y) * 2.0 - 1.0;
    ndcPos.z = depth * 2.0 - 1.0;
    ndcPos.w = 1.0;
    
    vec4 worldPos = invViewProjMatrix * ndcPos;
    if (worldPos.w != 0.0) {
        worldPos.xyz /= worldPos.w;
    }
    return worldPos.xyz;
}
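// Note: this reconstruction is only valid if invViewProjMatrix is the exact inverse of the
// view * projection matrices that were active when depthTex was written; any per-frame
// mismatch between those matrices and the uniform shifts the reconstructed position.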

// Treat a fragment as sky when the depth buffer still holds the clear value
bool isSky(float depth)
{
    return depth == 1.0;
}

// Clamp to [0, 1]
float saturate(float x)
{
    return clamp(x, 0.0, 1.0);
}

// Altitude of a point (distance to the Earth's center minus the Earth radius)
float getAltitude(vec3 worldPosition) {
    const float EARTH_RADIUS = 6378000.0; // Earth radius in meters
    float distanceToCenter = length(worldPosition); // distance from the point to the Earth's center
    return distanceToCenter - EARTH_RADIUS; // altitude (can be positive or negative)
}

// Extract camera axes from the view matrix
vec3 extractCameraRight(mat4 viewMatrix) {
    return normalize(vec3(viewMatrix[0][0], viewMatrix[1][0], viewMatrix[2][0]));
}
vec3 extractCameraUp(mat4 viewMatrix) {
    return normalize(vec3(viewMatrix[0][1], viewMatrix[1][1], viewMatrix[2][1]));
}
vec3 extractCameraFront(mat4 viewMatrix) {
    return normalize(-vec3(viewMatrix[0][2], viewMatrix[1][2], viewMatrix[2][2]));
}

// Height-dependent density factor (single formula used everywhere)
float getHeightDensityFactor(float height) {
    float clampedHeight = clamp(height, -3000.0, 30000.0);
    return (1.0 - (clampedHeight + 3000.0) / 33000.0) * globalDensity;
}
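// Note: the -3000 / 30000 constants above mirror the C++ statics _minHeight and _maxHeight;
// they are hard-coded here rather than passed in as uniforms.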

// 1. Sky fog
float calculateSkyFogFactor(vec3 worldPos) {
    vec3 cameraFront = extractCameraFront(ViewMatrix);
    vec3 cameraRight = extractCameraRight(ViewMatrix);
    vec3 viewDir = normalize(worldPos - cameraPos);
    vec3 projection = viewDir - dot(viewDir, cameraRight) * cameraRight;
    projection = normalize(projection);
    vec3 earthFront = normalize(cameraFront - dot(cameraFront, earthCenterDir) * earthCenterDir);

    float angleFromEarthCenter = acos(dot(projection, earthFront)) * (180.0 / 3.14159);
    float angleAboveHorizon = angleFromEarthCenter;
    
    // Base fog factor
    float transitionSteepness = 4.0;
    float transition = 1.0 / (1.0 + exp(-transitionSteepness * (angleAboveHorizon - 5.0)));
    
    float peakAngle = 10.0;
    float decayWidth = 25.0;
    float decay = exp(-pow((angleAboveHorizon - peakAngle) / decayWidth, 2.0));
    
    float fogFactor = transition * decay;
    fogFactor = pow(fogFactor, 0.8);
    
    // Apply camera-height and global-density influence
    float height = length(cameraPos) - 6378000.0; // camera altitude
    float heightFactor = getHeightDensityFactor(height);
    return saturate(fogFactor * heightFactor);
}


float calculateGroundFogFactor(vec3 worldPos) {
    // Step 1: straight-line distance from the fragment to the camera (world space)
    float distanceToCamera = length(worldPos - cameraPos);
    
    // Step 2: distance factor (core logic: dense far away, clear up close)
    float distanceFactor;
    if (distanceToCamera < depthFogStart) {
        distanceFactor = 0.0; // fully clear up close (no fog)
    } else if (distanceToCamera > depthFogEnd) {
        distanceFactor = 1.0; // densest fog in the distance
    } else {
        // In-between distances: smooth transition, density grows faster toward the far end
        float t = (distanceToCamera - depthFogStart) / (depthFogEnd - depthFogStart);
        distanceFactor = pow(t, 0.8); // exponent < 1: slow transition in the first half, fast in the second
    }
    
    // Total fog density: controlled only by the distance factor and the global density
    float totalFactor = distanceFactor * globalDensity;
    return saturate(totalFactor);
}
//float calculateGroundFogFactor(vec3 worldPos) {
//    // Step 1: distance from the object to the camera (more direct and accurate than converting depth)
//    float distanceToCamera = length(worldPos - cameraPos); // straight-line world-space distance to the camera
//    
//    // Step 2: distance factor (dense far away, clear up close, approximating an "integrated" fog)
//    // distance < depthFogStart: fog density ~ 0 (clear)
//    // distance > depthFogEnd:   fog density ~ 1 (densest)
//    // smooth transition in between, with a slight exponent to boost density in the distance
//    float distanceFactor;
//    if (distanceToCamera < depthFogStart) {
//        distanceFactor = 0.0; // fully clear up close
//    } else if (distanceToCamera > depthFogEnd) {
//        distanceFactor = 1.0; // full fog in the distance
//    } else {
//        // Smooth transition + exponent (density grows faster far away, mimicking an accumulated integral)
//        float t = (distanceToCamera - depthFogStart) / (depthFogEnd - depthFogStart);
//        distanceFactor = pow(t, 0.8); // exponent < 1: slow first half, fast second half
//    }
//    
//    // Height factor (the lower the object, the denser the fog)
//    float objectAltitude = getAltitude(worldPos); // altitude of the object (high on a summit, low at the foot)
//    // Height range [-3000, 30000] maps to a density factor in [1, 0] (dense low, thin high)
//    float clampedAltitude = clamp(objectAltitude, -3000.0, 30000.0);
//    float heightFactor = (30000.0 - clampedAltitude) / (30000.0 + 3000.0); // core formula: low -> 1, high -> 0
//    
//    // Combine the height and distance factors with the global density
//    float totalFactor = heightFactor * distanceFactor * globalDensity;
//    return saturate(totalFactor);
//}

void main()
{
    vec2 tc = gl_TexCoord[0].st * textureSize2DRect(colorTex, 0);
    vec4 sceneColor = texture2DRect(colorTex, tc);
    float depth = texture2DRect(depthTex, tc).r;
    vec3 worldPos = reconstructWorldPosition(tc, depth);
    
    float totalFogFactor = 0.0;
    
    // Apply a different fog model depending on the type of scene element
    if (isSky(depth)) {
        // Sky only gets the sky fog
        totalFogFactor = calculateSkyFogFactor(worldPos);
    } else {
        // Ground elements get the height fog and depth fog
        totalFogFactor = calculateGroundFogFactor(worldPos);
    }
    
    // Apply the final fog

    gl_FragColor = mix(sceneColor, fogColor, totalFogFactor);
    //gl_FragColor = vec4(depth, depth, depth, 1.0);
}
)";

// Create the RTT camera
osg::Camera* OEPreciseHorizonBlurEffect::createRTTCamera(osg::TextureRectangle* colorTex, osg::TextureRectangle* depthTex)
{
        osg::Camera* camera = new osg::Camera;

        // Basic settings
        camera->setClearColor(osg::Vec4(0.0f, 0.0f, 0.0f, 1.0f));
        camera->setClearMask(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT);
        camera->setClearDepth(1.0f);
        camera->setRenderTargetImplementation(osg::Camera::FRAME_BUFFER_OBJECT);
        camera->setRenderOrder(osg::Camera::PRE_RENDER);
        camera->setViewport(0, 0, colorTex->getTextureWidth(), colorTex->getTextureHeight());

        // Use an absolute reference frame
        camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF);

        // Disable implicit buffer attachments
        camera->setImplicitBufferAttachmentMask(0, 0);

        // Simple attachment configuration
        camera->attach(osg::Camera::COLOR_BUFFER, colorTex);
        camera->attach(osg::Camera::DEPTH_BUFFER, depthTex); 

        // Make sure depth testing is set up correctly
        osg::StateSet* ss = camera->getOrCreateStateSet();
        ss->setMode(GL_DEPTH_TEST, osg::StateAttribute::ON);

        osg::ref_ptr<osg::Depth> depthAttr = new osg::Depth;
        depthAttr->setFunction(osg::Depth::LEQUAL);
        depthAttr->setRange(0.0, 1.0);
        ss->setAttributeAndModes(depthAttr.get(), osg::StateAttribute::ON);

        return camera;
}

// Create the post-processing quad
osg::Group* OEPreciseHorizonBlurEffect::createPostProcessingQuad(osg::TextureRectangle* colorTex, osg::TextureRectangle* depthTex)
{
        osg::Group* group = new osg::Group;

        osg::Geometry* geom = osg::createTexturedQuadGeometry(
                osg::Vec3(0, 0, 0),
                osg::Vec3(1920, 0, 0),
                osg::Vec3(0, 1080, 0),
                0.0, 0.0, 1920, 1080
        );
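        // Note: with the HUD camera's ortho2D(0, 1, 0, 1) projection only the unit corner of
        // this 1920x1080 quad is actually visible, so the on-screen texture coordinates run
        // 0..1 and are scaled back up to texel units inside the shader.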
        _quadGeode = new osg::Geode;
        _quadGeode->addDrawable(geom);

        osg::StateSet* ss = _quadGeode->getOrCreateStateSet();
        ss->setMode(GL_LIGHTING, osg::StateAttribute::OFF);
        ss->setRenderingHint(osg::StateSet::TRANSPARENT_BIN);
        ss->setMode(GL_BLEND, osg::StateAttribute::ON);
        ss->setMode(GL_DEPTH_TEST, osg::StateAttribute::ON);

        ss->setTextureAttributeAndModes(0, colorTex, osg::StateAttribute::ON);
        ss->setTextureAttributeAndModes(1, depthTex, osg::StateAttribute::ON);

        osg::Program* program = new osg::Program;
        program->addShader(new osg::Shader(osg::Shader::FRAGMENT, GaussianHorizonBlurShader));
        ss->setAttributeAndModes(program, osg::StateAttribute::ON);

        // Add the uniforms
        ss->addUniform(new osg::Uniform("colorTex", 0));
        ss->addUniform(new osg::Uniform("depthTex", 1));
        _invViewProjMatrixUniform = new osg::Uniform("invViewProjMatrix", osg::Matrixf());
        ss->addUniform(_invViewProjMatrixUniform);
        _viewMatrixUniform = new osg::Uniform("ViewMatrix", osg::Matrixf());
        ss->addUniform(_viewMatrixUniform);
        _cameraPosUniform = new osg::Uniform("cameraPos", osg::Vec3f());
        ss->addUniform(_cameraPosUniform);
        _globalDensityUniform = new osg::Uniform("globalDensity", 1.0f);
        ss->addUniform(_globalDensityUniform);
        _fogColorUniform = new osg::Uniform("fogColor", osg::Vec4(1.0f, 1.0f, 1.0f, 0.7f));
        ss->addUniform(_fogColorUniform);
        _heightFogBaseAltUniform = new osg::Uniform("heightFogBaseAlt", 0.0f);
        ss->addUniform(_heightFogBaseAltUniform);
        _heightFogRangeUniform = new osg::Uniform("heightFogRange", 1000.0f);
        ss->addUniform(_heightFogRangeUniform);
        _depthFogStartUniform = new osg::Uniform("depthFogStart", 100.0f);
        ss->addUniform(_depthFogStartUniform);
        _depthFogEndUniform = new osg::Uniform("depthFogEnd", 100000.0f);
        ss->addUniform(_depthFogEndUniform);
        ss->addUniform(new osg::Uniform("earthCenterDir", osg::Vec3f()));

        _zNearUniform = new osg::Uniform("zNear", 100.0f);
        ss->addUniform(_zNearUniform);
        _zFarUniform = new osg::Uniform("zFar", 100000.0f);
        ss->addUniform(_zFarUniform);

        group->addChild(_quadGeode);
        return group;
}

void OEPreciseHorizonBlurEffect::open(osg::Group* sceneContent, osg::Camera* mainCamera)
{
        if (!sceneContent || !mainCamera) {
                std::cerr << "Error: scene content or main camera is null" << std::endl;
                return;
        }

        // Important: if old resources already exist, close and clean them up first
        if (_rttCamera.valid() || _hudCamera.valid()) {
                close();
        }

        // Get the main camera parameters
        /*mainCamera->getProjectionMatrixAsPerspective(_fovy, _aspectRatio, _zNear, _zFar);*/

        // Create the textures - using TextureRectangle
        int texWidth = 1920;
        int texHeight = 1080;

        _colorTexture = new osg::TextureRectangle;
        _colorTexture->setTextureSize(texWidth, texHeight);
        _colorTexture->setInternalFormat(GL_RGBA);
        _colorTexture->setFilter(osg::Texture::MIN_FILTER, osg::Texture::LINEAR);
        _colorTexture->setFilter(osg::Texture::MAG_FILTER, osg::Texture::LINEAR);
        _colorTexture->setWrap(osg::Texture::WRAP_S, osg::Texture::CLAMP_TO_EDGE);
        _colorTexture->setWrap(osg::Texture::WRAP_T, osg::Texture::CLAMP_TO_EDGE);

        _depthTexture = new osg::TextureRectangle;
        _depthTexture->setTextureSize(texWidth, texHeight);
        _depthTexture->setInternalFormat(GL_DEPTH_COMPONENT32);
        _depthTexture->setSourceFormat(GL_DEPTH_COMPONENT);
        _depthTexture->setSourceType(GL_FLOAT);
        _depthTexture->setFilter(osg::Texture::MIN_FILTER, osg::Texture::NEAREST);
        _depthTexture->setFilter(osg::Texture::MAG_FILTER, osg::Texture::NEAREST);
        _depthTexture->setWrap(osg::Texture::WRAP_S, osg::Texture::CLAMP_TO_EDGE);
        _depthTexture->setWrap(osg::Texture::WRAP_T, osg::Texture::CLAMP_TO_EDGE);

        // Make sure the depth texture uses the right comparison mode
        _depthTexture->setShadowComparison(false);
        _depthTexture->setShadowCompareFunc(osg::Texture::LEQUAL);

        // Create the RTT camera
        _rttCamera = createRTTCamera(_colorTexture.get(), _depthTexture.get());

        // Important: add the passed-in sceneContent to the RTT camera
        _rttCamera->addChild(sceneContent);

        // Keep the RTT camera in sync with the main camera
        _rttCamera->setReferenceFrame(osg::Camera::ABSOLUTE_RF);

        // Create the HUD camera - settings taken from the reference code
        _hudCamera = new osg::Camera;
        _hudCamera->setReferenceFrame(osg::Transform::ABSOLUTE_RF);
        _hudCamera->setClearMask(GL_DEPTH_BUFFER_BIT);
        _hudCamera->setRenderOrder(osg::Camera::POST_RENDER);
        _hudCamera->setAllowEventFocus(false);
        _hudCamera->setProjectionMatrix(osg::Matrix::ortho2D(0, 1, 0, 1)); // key change
        _hudCamera->getOrCreateStateSet()->setMode(GL_LIGHTING, osg::StateAttribute::OFF);

        // Create the post-processing quad
        osg::Group* postQuad = createPostProcessingQuad(_colorTexture.get(), _depthTexture.get());
        _hudCamera->addChild(postQuad);

        // Get the root node (the parent of sceneContent)
        osg::Group* root = dynamic_cast<osg::Group*>(sceneContent->getParent(0));
        if (!root) {
                std::cerr << "Error: could not get the root node" << std::endl;
                return;
        }

        // Remove sceneContent from the root (avoid rendering it twice)
        root->removeChild(sceneContent);

        // Add the RTT and HUD cameras to the root
        root->addChild(_rttCamera);
        root->addChild(_hudCamera);

        // Check the main camera's depth-test setting
        std::cout << "Main camera depth test: " << (mainCamera->getStateSet() &&
                mainCamera->getStateSet()->getMode(GL_DEPTH_TEST) == osg::StateAttribute::ON ? "on" : "off") << std::endl;

        // Check the main camera's clear settings
        std::cout << "Main camera clear mask: " << std::hex << mainCamera->getClearMask() << std::dec << std::endl;


        // Matrix synchronization callback
        class MatrixSyncCallback : public osg::Camera::DrawCallback
        {
        public:
                MatrixSyncCallback(osg::Camera* mainCam) : _mainCamera(mainCam) {}

                virtual void operator()(osg::RenderInfo& renderInfo) const
                {
                        osg::Camera* camera = renderInfo.getCurrentCamera();
                        if (camera && _mainCamera.valid()) {
                                // Make sure the depth buffer has finished writing
                                
                                camera->setViewMatrix(_mainCamera->getViewMatrix());
                                camera->setProjectionMatrix(_mainCamera->getProjectionMatrix());

                                double fovy; double aspectRatio; double zNear; double zFar;
                                _mainCamera->getProjectionMatrixAsPerspective(fovy, aspectRatio, zNear, zFar);
                                std::cout << "zNearFar: (" << zNear << ", " << zFar << ")" << std::endl;
                                camera->setProjectionMatrixAsPerspective(fovy, aspectRatio, zNear, zFar);

                                //glClear(GL_DEPTH_BUFFER_BIT);

                                // Update the uniforms every frame
                                OEPreciseHorizonBlurEffect::updateUniforms(_mainCamera.get());


                        }
                 }

        private:
                osg::observer_ptr<osg::Camera> _mainCamera;
        };

        _rttCamera->setPreDrawCallback(new MatrixSyncCallback(mainCamera));
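        // Note: a camera pre-draw callback runs during the draw traversal, after culling has
        // already been done for the frame, so matrices changed inside it are not the ones the
        // cull traversal (including osgEarth's near/far handling) used for this frame.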

}

// close(), added at the end of the file
void OEPreciseHorizonBlurEffect::close()
{
        if (!_rttCamera && !_hudCamera) {
                std::cout << "Precise horizon blur effect is not enabled; nothing to close" << std::endl;
                return;
        }

        // 1. Restore the scene graph structure (move sceneContent back to where it was)
        osg::Group* root = nullptr;
        osg::Node* sceneContent = nullptr;

        if (_rttCamera.valid() && _rttCamera->getNumChildren() > 0) {
                sceneContent = _rttCamera->getChild(0); // get the original scene content
                root = dynamic_cast<osg::Group*>(_rttCamera->getParent(0));

                // Even if root could not be obtained, try to recover it from sceneContent's original parent (more robust)
                if (!root && sceneContent) {
                        root = dynamic_cast<osg::Group*>(sceneContent->getParent(0));
                }

                if (root && sceneContent) {
                        _rttCamera->removeChild(sceneContent); // remove from the RTT camera
                        root->addChild(sceneContent);          // move it back to the root node
                }
        }

        // 2. Remove the RTT and HUD cameras from the root
        if (root && _rttCamera.valid()) {
                root->removeChild(_rttCamera);
        }
        if (root && _hudCamera.valid()) {
                root->removeChild(_hudCamera);
        }

        // 3. Fully reset all static members (important!)
        _rttCamera = nullptr;
        _hudCamera = nullptr;
        _colorTexture = nullptr;
        _depthTexture = nullptr;
        _quadGeode = nullptr; // reset the post-processing quad
        _invViewProjMatrixUniform = nullptr;
        _cameraPosUniform = nullptr;
        _viewMatrixUniform = nullptr;
        _globalDensityUniform = nullptr;
        _fogColorUniform = nullptr;
        _heightFogBaseAltUniform = nullptr;
        _heightFogRangeUniform = nullptr;
        _depthFogStartUniform = nullptr;
        _depthFogEndUniform = nullptr;
}

void OEPreciseHorizonBlurEffect::updateUniforms(osg::Camera* camera)
{
        if (!camera || !_hudCamera || !_quadGeode) return;
        // Get the near and far clipping planes
        double fovy; double aspectRatio; double zNear; double zFar;
        camera->getProjectionMatrixAsPerspective(fovy, aspectRatio, zNear, zFar);

        

        // Get the view and projection matrices
        osg::Matrixd viewMatrix = camera->getViewMatrix();
        osg::Matrixd projMatrix = camera->getProjectionMatrix();

        /*std::cout << "0: " << viewMatrix(0,0) << " 1:" << viewMatrix(0, 1) << " 2:" << viewMatrix(0, 2) << std::endl;*/

        osg::Matrixd ViewMatrix = osg::Matrixd::inverse(viewMatrix); // note: unused; the "ViewMatrix" uniform below is set from viewMatrix directly
        // Compute the inverse view-projection matrix
        osg::Matrixd viewProjMatrix = viewMatrix * projMatrix;
        osg::Matrixd invViewProjMatrix = osg::Matrixd::inverse(viewProjMatrix);

        // Get the camera position
        osg::Matrixd invViewMatrix = osg::Matrixd::inverse(viewMatrix);
        osg::Vec3d cameraPos = invViewMatrix.getTrans();

        // Compute the direction toward the Earth's center (key change)
        osg::Vec3d earthCenterDir = -cameraPos;
        earthCenterDir.normalize();

        // Key fix: update the post-processing quad's state set
        if (_hudCamera->getNumChildren() > 0) {
                /*osg::Node* postQuad = _hudCamera->getChild(0);*/
                osg::StateSet* ss = _quadGeode->getOrCreateStateSet();

                // Update the view matrix
                osg::Uniform* ViewUniform = ss->getUniform("ViewMatrix");
                if (!ViewUniform) {
                        ViewUniform = new osg::Uniform("ViewMatrix", osg::Matrixf());
                        ss->addUniform(ViewUniform);
                }
                ViewUniform->set(osg::Matrixf(viewMatrix));

                // Update the inverse view-projection matrix
                osg::Uniform* invViewProjUniform = ss->getUniform("invViewProjMatrix");
                if (!invViewProjUniform) {
                        invViewProjUniform = new osg::Uniform("invViewProjMatrix", osg::Matrixf());
                        ss->addUniform(invViewProjUniform);
                }
                invViewProjUniform->set(osg::Matrixf(invViewProjMatrix));

                // Update the camera position
                osg::Uniform* cameraPosUniform = ss->getUniform("cameraPos");
                if (!cameraPosUniform) {
                        cameraPosUniform = new osg::Uniform("cameraPos", osg::Vec3f());
                        ss->addUniform(cameraPosUniform);
                }
                cameraPosUniform->set(osg::Vec3f(cameraPos));

                // Set the Earth-center direction vector
                osg::Uniform* earthCenterDirUniform = ss->getUniform("earthCenterDir");
                if (!earthCenterDirUniform) {
                        earthCenterDirUniform = new osg::Uniform("earthCenterDir", osg::Vec3f());
                        ss->addUniform(earthCenterDirUniform);
                }
                earthCenterDirUniform->set(osg::Vec3f(earthCenterDir));

                // Update the near/far planes
                osg::Uniform* zNearUniform = ss->getUniform("zNear");
                if (!zNearUniform) {
                        zNearUniform = new osg::Uniform("zNear", float(zNear));
                        ss->addUniform(zNearUniform);
                }
                zNearUniform->set(float(zNear));

                osg::Uniform* zFarUniform = ss->getUniform("zFar");
                if (!zFarUniform) {
                        zFarUniform = new osg::Uniform("zFar", float(zFar));
                        ss->addUniform(zFarUniform);
                }
                zFarUniform->set(float(zFar));

        }
        // Debug output
        std::cout << "zNearFar: (" << zNear << ", " << zFar
                << ") viewMatrix: (" << viewMatrix(0,0) << ", " << viewMatrix(0, 1) << ", " << viewMatrix(0, 2) << ")" << std::endl;
}

void OEPreciseHorizonBlurEffect::setGlobalDensity(float density)
{
        if (_globalDensityUniform) {
                float clamped = std::max(0.0f, std::min(density, 1.0f));
                _globalDensityUniform->set(clamped);
        }
}

void OEPreciseHorizonBlurEffect::setHeightFogParams(float baseAlt, float range)
{
        if (_heightFogBaseAltUniform)
                _heightFogBaseAltUniform->set(baseAlt);
        if (_heightFogRangeUniform)
                _heightFogRangeUniform->set(range);
}

void OEPreciseHorizonBlurEffect::setDepthFogParams(float start, float end)
{
        if (_depthFogStartUniform)
                _depthFogStartUniform->set(start);
        if (_depthFogEndUniform)
                _depthFogEndUniform->set(end);
}

void OEPreciseHorizonBlurEffect::setFogColor(osg::Vec4 color)
{
        if (_fogColorUniform)
                _fogColorUniform->set(color);
}


5 answers · default · newest

  • 阿里嘎多学长 2025-10-28 19:18

    Compiled by 阿里嘎多学长 from AIGC output. Because of a mobile display issue this answer may not be shown in full; please view it on a PC for the more detailed solution.

    Solution

    From your description, the problem most likely lies in the depth values. A few points to pay attention to:

    1. How the depth value is computed: make sure the depth is computed correctly. In osg 3.4 the depth state is configured through the osg::Depth class; make sure you are using the right depth settings.
    2. How the depth value is passed along: make sure the depth value actually reaches the shader. In osg 3.4 state is passed to the shader through osg::StateAttribute objects on a StateSet.
    3. How the depth range is clamped: make sure the depth range is set correctly; in osg 3.4 this is also done through the osg::Depth class.

    Core code

    // Create the depth state (osg::Depth holds the depth-test settings)
    osg::ref_ptr<osg::Depth> depthState = new osg::Depth;
    
    // Set the depth comparison function
    depthState->setFunction(osg::Depth::LEQUAL);
    
    // Set the depth range
    depthState->setRange(0.0, 1.0);
    
    // Attach the depth state to the node
    node->getOrCreateStateSet()->setAttributeAndModes(depthState.get(), osg::StateAttribute::ON);
    

    Suggestions

    • Make sure you are using the osg 3.4 and osgEarth 2.7 APIs correctly.
    • Use osg 3.4's debugging facilities to check how the depth value is computed and passed along.
    • Use shader debugging to check how the depth value behaves (see the sketch below for one way to read the depth buffer back).
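
    As an illustration only (my own sketch, not part of the original answer, assuming an RTT camera set up as in the question's code): attaching an osg::Image to the camera's depth buffer lets you read the rendered depth values back on the CPU and check whether they jump between frames while the camera moves. Note that this attach call replaces the depth-texture attachment, so it is only meant for temporary debugging.

    #include <osg/Camera>
    #include <osg/Image>

    // Attach a CPU-side image to the RTT camera's depth buffer for inspection.
    osg::ref_ptr<osg::Image> attachDepthReadback(osg::Camera* rttCamera, int width, int height)
    {
        osg::ref_ptr<osg::Image> depthImage = new osg::Image;
        depthImage->allocateImage(width, height, 1, GL_DEPTH_COMPONENT, GL_FLOAT);

        // The image is refilled after every frame the RTT camera renders.
        rttCamera->attach(osg::Camera::DEPTH_BUFFER, depthImage.get());
        return depthImage;
    }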
