osg使用整理(11):延遲渲染

王小于的啦發表於2024-06-03

osg使用整理(11):延遲渲染

一、基礎概念

  1. 前向渲染流程:頂點著色器->圖元裝配成點線三角形->幾何著色器->裁剪剔除->光柵化(片元著色器)->透明度測試、深度測試。

  2. 延遲渲染流程:頂點著色器->圖元裝配成點線三角形->幾何著色器->裁剪剔除->光柵化輸出G-Buffer,儲存每個畫素的屬性資訊(位置、法線、顏色)->深度測試->光照計算->片元著色器->透明度測試。

  3. 渲染管線的差異:

    a. 延遲渲染需要兩個pass,先生成G-Buffer後進行光照計算。

    b. 延遲渲染不能使用MSAA演算法抗鋸齒。

  4. 優劣勢:

    a. 延遲渲染先進行深度測試,確定了可見畫素後再進行光照計算,而不是對所有圖元進行光柵化再光照計算,避免了大量無效計算。

    b. 延遲渲染在一個Pass中處理多光源計算,提高了渲染效率。

    c. 延遲渲染的G-Buffer佔用頻寬較大,需要合併一些紋理通道、減少buffer位數、將兩個pass合併為OnePassDeferred。

    d. 延遲渲染的光照計算基於G-Buffer中統一儲存的屬性,所有物體通常只能套用同一套光照模型,難以針對不同材質使用不同的著色方式,靈活性較低。

    e. 延遲渲染中透明物體需要單獨的Pass來處理。

二、G-Buffer

​ 如下圖所示,延遲渲染首先生成稱為G-Buffer的一系列紋理,常包含世界座標系下的位置向量、顏色向量、頂點法線向量等等。

​ osg中要實現延遲渲染,首先準備離屏相機:

// Create an off-screen render-to-texture camera backed by an FBO.
// buffer: which attachment point (e.g. COLOR_BUFFER0) the texture binds to.
// tex:    optional texture to attach; if given, the viewport is resized to it.
osg::ref_ptr<RttCamera> createRTTCamera(osg::Camera::BufferComponent buffer,osg::Texture* tex,int width,int height)
{
	osg::ref_ptr<RttCamera> camera=new RttCamera(width,height);
	camera->setRenderTargetImplementation(osg::Camera::RenderTargetImplementation::FRAME_BUFFER_OBJECT);
	camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
	camera->setPostDrawCallback(new FBOPostDrawCallback); // was setPostDrawCallBack (no such method)
	// PRE_RENDER so the G-Buffer pass runs before the main scene traversal.
	camera->setRenderOrder(osg::Camera::PRE_RENDER,20);
	camera->setViewport(0,0,width,height); // was setViewPort (no such method)
	if(tex)
	{
		// Use plain LINEAR filtering: an RTT texture has no mipmap chain, so a
		// *_MIPMAP_* minification filter would leave the attachment incomplete.
		// (Original "LINER_MIPMAP_NEAREST" is also not a valid osg::Texture enum.)
		tex->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
		tex->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
		// Match the viewport to the attached texture's actual size.
		camera->setViewport(0,0,tex->getTextureWidth(),tex->getTextureHeight());
		camera->attach(buffer,tex);
	}
	return camera;
}

​ 注意到,離屏相機渲染目標設定為FBO,同時渲染次序設定為PRE_RENDER。然後準備附著在離屏相機上的顏色和深度紋理,簡單認為有三維位置座標紋理、法線方向紋理、基礎顏色紋理三個。為了提高紋理精度,實現HDR渲染,我們可以使用浮點數緩衝,其內部格式通常設為GL_RGB16F, GL_RGBA16F, GL_RGB32F 或者GL_RGBA32F。浮點數緩衝可以儲存超過0.0到1.0範圍的浮點值。

// Create a floating-point color attachment texture for the G-Buffer.
// GL_RGBA32F lets the buffer store HDR values outside the [0,1] range.
osg::ref_ptr<osg::Texture2D> createColorTexture(int width,int height)
{
	osg::ref_ptr<osg::Texture2D> texture=new osg::Texture2D;
	texture->setTextureSize(width,height);
	texture->setInternalFormat(GL_RGBA32F);
	texture->setSourceFormat(GL_RGBA);
	texture->setSourceType(GL_FLOAT); // float source type: higher precision
	// Fixed: original called these on an undeclared variable "tex".
	texture->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
	texture->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
	return texture;
}
// Create a 32-bit floating-point depth attachment texture.
osg::ref_ptr<osg::Texture2D> createDepthTexture(int width,int height)
{
	osg::ref_ptr<osg::Texture2D> texture=new osg::Texture2D;
	texture->setTextureSize(width,height);
	texture->setInternalFormat(GL_DEPTH_COMPONENT32F);
	// The source (pixel transfer) format must be the unsized GL_DEPTH_COMPONENT;
	// sized formats such as GL_DEPTH_COMPONENT32F are internal formats only.
	texture->setSourceFormat(GL_DEPTH_COMPONENT);
	texture->setSourceType(GL_FLOAT);
	// Fixed: original called these on an undeclared variable "tex".
	texture->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
	texture->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
	return texture;
}

​ 建立延遲渲染相機,首先渲染到G-buffer上,然後建立HUD相機渲染到和螢幕同樣大小的矩形上。

/* Deferred-rendering (G-Buffer) camera: renders the scene once, writing
 * color, normal and view-space position into three attached textures
 * via MRT (multiple render targets). */
osg::ref_ptr<RttCamera> createDeferCamera(osg::Camera::BufferComponent buffer1,osg::Texture* tex1,
										  osg::Camera::BufferComponent buffer2,osg::Texture* tex2,
                                          osg::Camera::BufferComponent buffer3,osg::Texture* tex3,int width,int height)
{
	osg::ref_ptr<RttCamera> camera=new RttCamera(width,height);
	camera->setRenderTargetImplementation(osg::Camera::RenderTargetImplementation::FRAME_BUFFER_OBJECT);
	camera->setClearMask(GL_COLOR_BUFFER_BIT|GL_DEPTH_BUFFER_BIT);
	camera->setPostDrawCallback(new FBOPostDrawCallback); // was setPostDrawCallBack
	camera->setRenderOrder(osg::Camera::PRE_RENDER,20);
	camera->setViewport(0,0,width,height); // was setViewPort
	// Plain LINEAR filtering: RTT textures have no mipmaps (see createRTTCamera).
	if(tex1)
	{
		tex1->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
		tex1->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
		camera->attach(buffer1,tex1);
	}
	if(tex2)
	{
		tex2->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
		tex2->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
		camera->attach(buffer2,tex2);
	}
	if(tex3)
	{
		tex3->setFilter(osg::Texture::MIN_FILTER,osg::Texture::LINEAR);
		tex3->setFilter(osg::Texture::MAG_FILTER,osg::Texture::LINEAR);
		camera->attach(buffer3,tex3);
	}
	/// Vertex shader: pass view-space position/normal and texcoords downstream.
	const char* vertCode=R"(
		#version 330
		layout(location = 0) in vec3 Position;
		layout(location = 2) in vec3 normal;
		layout(location = 3) in vec3 TexCoord;
		
		uniform mat4 osg_ModelViewProjectionMatrix;
		uniform mat4 osg_ModelViewMatrix;
		// osg_NormalMatrix is a mat3 (inverse-transpose of the upper-left 3x3
		// of the model-view matrix) — it was wrongly declared mat4 and unused.
		uniform mat3 osg_NormalMatrix;
		
		out vec3 vNormal;
		out vec2 texCoord;
		out vec4 fragPos;
		void main()
		{
			texCoord=TexCoord.xy; // TexCoord is vec3; original vec3->vec2 assignment would not compile
			fragPos=osg_ModelViewMatrix*vec4(Position,1.0);
			// Transform the normal with the normal matrix. The original used
			// vec4(-normal,1.0): w=1.0 wrongly applies translation to a direction.
			vNormal=normalize(osg_NormalMatrix*normal);
			gl_Position=osg_ModelViewProjectionMatrix*vec4(Position,1.0);
		}
	)";
	
	/// Fragment shader: write the G-Buffer via MRT outputs.
	const char* fragCode=R"(
		#version 330 core
		uniform vec3 frontCol=vec3(1.0,0.0,0.2);
        layout (location = 0) out vec4 gColor;
        layout (location = 1) out vec4 gNormal;
        layout (location = 2) out vec4 gPosition;

        in vec2 texCoord;
        in vec4 fragPos;
        in vec3 vNormal;

        void main()
        {    
            // Store the fragment position vector in the first gbuffer texture
            gPosition.xyz = fragPos.xyz;
            // Also store the per-fragment normals into the gbuffer
            gNormal = vec4(vNormal,1.0);          
            gColor=vec4(frontCol,1.0);
        }
	)";
	
	osg::ref_ptr<osg::Shader> vertShader=new osg::Shader(osg::Shader::VERTEX,vertCode);
    osg::ref_ptr<osg::Shader> fragShader=new osg::Shader(osg::Shader::FRAGMENT,fragCode);
    osg::ref_ptr<osg::Program>  program=new osg::Program;
    program->addShader(vertShader);
    program->addShader(fragShader);
    // "OVERRIDE_ON" is not an OSG token; combine the StateAttribute flags.
    camera->getOrCreateStateSet()->setAttributeAndModes(program,
        osg::StateAttribute::OVERRIDE|osg::StateAttribute::ON);
    return camera;
}

​ HUD相機用於將紋理混合結果輸出到螢幕上,需要注意繫結一個矩形幾何為子節點,並正確設定渲染目標,設定RenderOrder為POST_RENDER。

/* Deferred-lighting HUD camera: draws a full-screen quad textured with the
 * G-Buffer results. Runs POST_RENDER so it composites after the RTT passes. */
osg::ref_ptr<RttCamera> createHUDCamera(osg::Texture* tex1,osg::Texture* tex2,int width,int height)
{
	osg::ref_ptr<RttCamera> camera=new RttCamera(width,height);
	camera->setClearMask(GL_DEPTH_BUFFER_BIT);
	camera->setPostDrawCallback(new FBOPostDrawCallback); // was setPostDrawCallBack
	camera->setRenderOrder(osg::Camera::POST_RENDER,100);
	camera->setReferenceFrame(osg::Transform::ABSOLUTE_RF); // original lines lacked ';'
	// The quad spans [-width,width] x [-height,height]; the original ortho2D
	// call inverted the x range (and "setProjectMatrix" is not an OSG method).
	camera->setProjectionMatrix(osg::Matrix::ortho2D(-width,width,-height,height));
	camera->setViewport(0,0,width,height); // was setViewPort
	camera->addChild(createQuadGeode(tex1,tex2,width,height));
	return camera;
}

/* Full-screen quad geode: blends two input textures (base + model) in a
 * fragment shader and outputs the composited color. */
osg::ref_ptr<osg::Geode> createQuadGeode(osg::Texture* baseTexture,osg::Texture* modelTexture,int width,int height)
{
	/// Quad vertices (counter-clockwise). Note: "osg:" single-colon scope
	/// typos fixed throughout; the 4th vertex was wrongly a duplicate of the
	/// 2nd — it must be the top-left corner to form a proper quad.
	osg::ref_ptr<osg::Vec3Array> vertices= new osg::Vec3Array;
	vertices->push_back(osg::Vec3(-width,-height,0.f));
	vertices->push_back(osg::Vec3(width,-height,0.f));
	vertices->push_back(osg::Vec3(width,height,0.f));
	vertices->push_back(osg::Vec3(-width,height,0.f));
	/// One unit normal shared by the whole quad (was (0,0,2): not unit length).
	osg::ref_ptr<osg::Vec3Array> normals= new osg::Vec3Array;
	normals->push_back(osg::Vec3(0.0,0.0,1.f));
	/// Texture coordinates, matching the vertex order above
	/// (bottom-left, bottom-right, top-right, top-left).
	osg::ref_ptr<osg::Vec2Array> texCoords= new osg::Vec2Array;
	texCoords->push_back(osg::Vec2(0.0,0.f));
	texCoords->push_back(osg::Vec2(1.0,0.f));
	texCoords->push_back(osg::Vec2(1.0,1.f));
	texCoords->push_back(osg::Vec2(0.0,1.f));
	/// Build the quad geometry.
	osg::ref_ptr<osg::Geometry> quad= new osg::Geometry;
	quad->setVertexArray(vertices);
	// A single normal requires BIND_OVERALL, otherwise per-vertex binding
	// is assumed and the array size would not match.
	quad->setNormalArray(normals,osg::Array::BIND_OVERALL);
	quad->setTexCoordArray(0,texCoords);
	quad->addPrimitiveSet(new osg::DrawArrays(GL_QUADS,0,4));
	/// Attach drawable and bind the two sampler uniforms to units 0 and 1.
	/// (Original addUniform lines each had an extra closing parenthesis.)
	osg::ref_ptr<osg::Geode> quadGeode=new osg::Geode;
	quadGeode->addDrawable(quad);
	quadGeode->getOrCreateStateSet()->addUniform(new osg::Uniform("baseTexture",0));
	quadGeode->getOrCreateStateSet()->setTextureAttributeAndModes(0,baseTexture);
	quadGeode->getOrCreateStateSet()->addUniform(new osg::Uniform("modelTexture",1));
	quadGeode->getOrCreateStateSet()->setTextureAttributeAndModes(1,modelTexture);
	
	const char* vertCode=R"(
		#version 330
		layout(location = 0) in vec3 Position;
		layout(location = 2) in vec3 normal;
		layout(location = 3) in vec3 TexCoord;
		
		uniform mat4 osg_ModelViewProjectionMatrix;
		uniform mat4 osg_ModelViewMatrix;
		
		out vec2 texCoord;
		void main()
		{
			texCoord=TexCoord.xy; // vec3->vec2: take xy (original assignment would not compile)
			gl_Position=osg_ModelViewProjectionMatrix*vec4(Position,1.0);
		}
	)";
	
	// Mix model and base colors by the base alpha (missing #version added).
	const char* fragCode=R"(
		#version 330 core
		uniform sampler2D baseTexture;
		uniform sampler2D modelTexture;
		
		in vec2 texCoord;
		out vec4 fragColor;
		void main()
		{
			vec4 modelCol=texture(modelTexture,texCoord);
			vec4 baseCol=texture(baseTexture,texCoord);
			fragColor=vec4(mix(modelCol.rgb,baseCol.rgb,baseCol.a),baseCol.a+modelCol.a);
		}
	)";
    osg::ref_ptr<osg::Shader> vertShader=new osg::Shader(osg::Shader::VERTEX,vertCode);
    osg::ref_ptr<osg::Shader> fragShader=new osg::Shader(osg::Shader::FRAGMENT,fragCode);
    osg::ref_ptr<osg::Program>  program=new osg::Program;
    program->addShader(vertShader);
    program->addShader(fragShader);
    // "OVERRIDE_ON" is not an OSG token; combine the StateAttribute flags.
    quadGeode->getOrCreateStateSet()->setAttributeAndModes(program,
        osg::StateAttribute::OVERRIDE|osg::StateAttribute::ON);
	return quadGeode;
}

相關文章