我理解的光线投射是遍历屏幕坐标生成不同起点的射线进行采样,累积颜色后绘制出二维图像。现在遍历了不同的屏幕坐标 (x, y),却发现屏幕坐标转换的世界坐标都是相同的,完全没有变化。
// Ray casting: unproject every screen pixel (x, y) into a world-space ray,
// then sample/accumulate colour along that ray.
osg::Vec3d eye, center, up;
camera->getViewMatrixAsLookAt(eye, center, up);

// NOTE(review): the lookAt above uses `camera` but the matrices below use
// `_camera` — confirm both refer to the same camera object.
osg::Matrix viewMatrix = _camera->getViewMatrix();
osg::Matrix projMatrix = _camera->getProjectionMatrix();
// Inverse of (view * projection): maps clip/NDC space back to world space.
// OSG uses row-vector convention, hence `vec * matrix` below.
osg::Matrix inverseVP = osg::Matrix::inverse(viewMatrix * projMatrix);

for (int y = 0; y < height; ++y) {
    for (int x = 0; x < width; ++x) {
        // BUG FIX: the original used `screen_x` / `screen_y` (fixed values
        // from outside the loop) instead of the loop counters `x` / `y`,
        // so every iteration unprojected the SAME pixel — that is why all
        // "per-pixel" world coordinates came out identical.
        double ndc_x = (2.0 * x / width) - 1.0;
        double ndc_y = 1.0 - (2.0 * y / height);  // flip: screen y grows downward, NDC y grows upward

        // Unproject the near-plane point (NDC z = -1) to world space.
        osg::Vec4 clipNear(ndc_x, ndc_y, -1.0, 1.0);
        osg::Vec4 worldNearH = clipNear * inverseVP;
        osg::Vec3d worldNear(worldNearH.x() / worldNearH.w(),
                             worldNearH.y() / worldNearH.w(),
                             worldNearH.z() / worldNearH.w());  // perspective divide

        // Unproject the far-plane point (NDC z = +1).
        osg::Vec4 clipFar(ndc_x, ndc_y, 1.0, 1.0);
        osg::Vec4 worldFarH = clipFar * inverseVP;
        osg::Vec3d worldFar(worldFarH.x() / worldFarH.w(),
                            worldFarH.y() / worldFarH.w(),
                            worldFarH.z() / worldFarH.w());

        // Removed two broken experiments from the original:
        //  - `osg::Vec3d(x, y, 0) * inverseVPW` — `inverseVPW` was never
        //    defined (it would need the window matrix folded in:
        //    inverse(view * proj * windowMatrix)).
        //  - feeding raw pixel coords (x, y) into inverseVP — inverseVP
        //    expects NDC coordinates in [-1, 1], not pixel coordinates,
        //    so that result was meaningless.

        // BUG FIX: `center - eye` is the camera's view axis — identical for
        // every pixel, so the whole image traced one ray. The direction must
        // vary per pixel: use the two unprojected points of this pixel.
        osg::Vec3d rayDirection = worldFar - worldNear;
        rayDirection.normalize();
        // Start the ray on the near plane (could also use `eye` for a
        // perspective camera; worldNear also works for orthographic).
        Ray rayOfPixel(worldNear, rayDirection);
        // ... sample along rayOfPixel and accumulate colour ...
    }
}