
Notes on a Unity Implementation of HBAO

2022/10/15

LastMod:2022/10/16

Categories: CG

HBAO Implementation Notes

The main reference is the original paper, Image-Space Horizon-Based Ambient Occlusion.pdf. The core idea is to march away from each pixel in several directions within the hemisphere above it; the more rugged (occluded) the geometry encountered along those directions, the larger the final AO value. The key formula is on page 12:

(Figure: the core AO formula from page 12 of the paper.)
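For reference, as I remember it the formula boils down to the following per-direction occlusion integral (a paraphrase, not a verbatim copy; h(θ) is the horizon angle, t(θ) the tangent angle of the surface, and W an attenuation term):

    AO = \frac{1}{2\pi} \int_{\theta=-\pi}^{\pi} \bigl(\sin h(\theta) - \sin t(\theta)\bigr)\, W(\theta)\, d\theta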

Tangent Bias


To reduce the jitter caused by surface unevenness along the march, a bias is added.

When a mesh does not have enough triangles (for what should be a smooth curved surface), spurious AO shows up along the seams between triangle faces. A Bias parameter is therefore needed to ignore small AO values, implemented by raising the Tangent vector.
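Concretely, BiasedViewPosTan in the appendix maps the bias (a value in [0, 1]) to an angle and adds it as a constant offset to the tangent value:

    \tan t_{\text{biased}} = \tan t + \tan\!\left(\mathrm{bias} \cdot \tfrac{\pi}{2}\right)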

The per-pixel tangent direction can only be estimated, and the estimate has to come from neighboring pixels in screen space. The slides use dpdx/dpdy (ddx/ddy), which is essentially the finite-difference approach to differentiation. In my tests, a symmetric (central) difference gave better results. The code looks roughly like this:

float3 tangent =
    FetchViewPos(input.uv + dir * _MainTex_TexelSize.xy) -
    FetchViewPos(input.uv - dir * _MainTex_TexelSize.xy);
tangent = normalize(tangent);
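For comparison, the ddx/ddy variant from the slides would look roughly like the sketch below. This is my untested reconstruction (it reuses FetchViewPos and the 2D marching direction dir from the snippet above): it takes the directional derivative of the reconstructed view-space position along dir.

// Sketch: one-sided derivative estimate of the tangent (not what I ship).
// ddx/ddy return per-quad screen-space derivatives of the reconstructed position.
float3 P    = FetchViewPos(input.uv);
float3 dPdx = ddx(P);
float3 dPdy = ddy(P);
// Directional derivative of P along the 2D marching direction `dir`.
float3 tangent = normalize(dPdx * dir.x + dPdy * dir.y);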

Distance-Weighted Sampling

To reduce the discontinuities introduced by hemisphere sampling (because of the radius limit, a sample point may be reachable from pixel A but not from its neighbor B, so the AO computed at the two pixels can differ visibly), a distance attenuation is added so that the AO values at A and B transition smoothly.

(Figure: the per-sample weighted AO formula from the slides.)

Looking at the formula in the figure: each sample S_i contributes only its increase over the previous sample, weighted by W(S_i). If the weight is held constant at W(S) = 1, the accumulated WAO for two samples is, for example,

AO(S1) + (AO(S2) - AO(S1)) = AO(S2) = sin(h(S2)) - sin(t)
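Spelled out (my paraphrase of the slides; the falloff W below is the one the code actually uses, see FallOff in the appendix):

    WAO = \sum_i W(S_i)\,\bigl(AO(S_i) - AO(S_{i-1})\bigr), \qquad AO(S_i) = \sin h(S_i) - \sin t, \quad AO(S_0) = 0

    W(s) = \operatorname{saturate}\!\left(1 - \frac{s^2}{R^2}\right)

With W ≡ 1 the sum telescopes to the largest per-direction AO encountered, which is exactly what the Top-tracking implementation below computes.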

Key points for implementing the incremental weighted sampling (see the fullAO function right after this list):

  1. Keep a Top variable tracking the current maximum of sin(theta) - sin(tangent), initialized to 0.
  2. At each step, a sample only contributes if its AO exceeds the current maximum.
  3. The closer a sample is to the boundary of the sampling hemisphere, the lower its contribution to the AO.
inline float fullAO(float3 pos, float3 stepPos, float3 tangent, inout float top)
{
    float3 h = stepPos - pos;
    float3 h_dir = normalize(h);
    // sine of the horizon angle at the sample point
    float tanH = ViewPosTan(h_dir);
    float sinH = TanToSin(tanH);
    // sine of the (biased) tangent angle
    float tanT = BiasedViewPosTan(tangent, _AOBias);
    float sinT = TanToSin(tanT);

    // AO value of the current sample
    float sinBlock = sinH - sinT;
    // contributes nothing if it is below the running maximum; otherwise only the
    // increase counts, attenuated by the distance falloff
    float diff = max(sinBlock - top, 0);
    top = max(sinBlock, top);

    // distance from the sample to the center pixel; the falloff is 1 - d^2/r^2
    float dist = length(h);
    return diff * FallOff(dist);
}

Implementation Differences

  1. The Snap UV trick from page 46 of the slides seemed to introduce odd patterns on screen in my code, possibly because I did not implement it correctly. Plain bilinear sampling shows no visible artifacts, so I left it out.
  2. The depth-aware blur from page 27 of the slides is not implemented; I use a simple Gaussian blur instead, which slightly blurs the AO across edges. A sketch of what a depth-aware weight could look like follows below.
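For reference only: a depth-aware blur usually just multiplies each Gaussian tap by a weight that decays with the depth difference from the center pixel. A minimal sketch under assumed names (DepthAwareWeight and _BlurDepthSharpness are hypothetical and not part of the shader in the appendix); _CameraDepthTexture and Linear01Depth are the same as in the AO pass:

float _BlurDepthSharpness; // hypothetical tuning parameter, not in the appendix shader
// Sketch of a depth-aware tap weight (not implemented in this post).
// gaussW is the precomputed Gaussian weight of the tap.
float DepthAwareWeight(float2 centerUV, float2 tapUV, float gaussW)
{
    float dc = Linear01Depth(SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, centerUV));
    float dt = Linear01Depth(SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, tapUV));
    // Taps whose depth differs a lot from the center get almost no weight,
    // so the AO does not bleed across depth discontinuities.
    return gaussW * exp(-_BlurDepthSharpness * abs(dc - dt));
}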

The result screenshots are shown below (click an image to enlarge).

Reference

  1. https://github.com/scanberg/hbao/blob/master/resources/shaders/hbao_frag.glsl
  2. https://github.com/shadylyf321/HBAO

Appendix

Gist: https://gist.github.com/BlurryLight/b351cd29a21399681df5a1ac66c0b3d3

#ifndef HBAO_CGINC
#define HBAO_CGINC
#include "UnityCG.cginc"
sampler2D _MainTex;
float4 _MainTex_TexelSize;
half4x4 _WorldToViewMatrix;
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
v2f HBAO_vert(appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;
return o;
}
float _AOStrength;
float _AORadius;
float _AOBias;
float _EnableBlur;
sampler2D _CameraGBufferTexture2;
sampler2D _CameraDepthTexture;
float2 _InvTextureSize;
float getRawDepth(float2 uv) { return SAMPLE_DEPTH_TEXTURE_LOD(_CameraDepthTexture, float4(uv, 0.0, 0.0)); }
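// Reconstruct the view-space position for a uv: unproject the corresponding point
// on the far plane, then scale the resulting ray by Linear01Depth (= viewZ / far)
// so it lands at the pixel's actual depth.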
inline float3 FetchViewPos(float2 uv)
{
float3 viewSpaceRay = mul(unity_CameraInvProjection, float4(uv * 2.0 - 1.0, 1.0, 1.0) * _ProjectionParams.z);
float rawDepth = getRawDepth(uv);
return viewSpaceRay * Linear01Depth(rawDepth);
}
inline float FallOff(float dist)
{
return saturate(1 - dist * dist / (_AORadius * _AORadius));
}
inline float TanToSin(float x)
{
return x / sqrt(x * x + 1.0);
}
inline float ViewPosTan(float3 V)
{
// On Direct3D the camera coordinate system differs from OpenGL, mainly in that the Z axis is flipped:
// in D3D the camera faces along +Z, so a view-space direction with negative Z points toward the camera.
// Unity follows the OpenGL convention:
// the +Z axis points toward the camera (the camera looks down -Z).
return V.z;
}
inline float BiasedViewPosTan(float3 V, float bias)
{
//bias [0,1]
float tangentBias = tan(bias * 0.5 * UNITY_PI);
return ViewPosTan(V) + tangentBias;
}
float fullAO(float3 pos, float3 stepPos, float3 tangentVec, inout float top)
{
float3 h = stepPos - pos;
float3 h_dir = normalize(h);
float tanH = ViewPosTan(h_dir);
float sinH = TanToSin(tanH);
float tanT = BiasedViewPosTan(tangentVec, _AOBias);
float sinT = TanToSin(tanT);
float dist = length(h);
float sinBlock = sinH - sinT;
float diff = max(sinBlock - top, 0);
top = max(sinBlock, top);
return diff * FallOff(dist);
}
// very bad noise
// from here https://forum.unity.com/threads/generate-random-float-between-0-and-1-in-shader.610810/
float random(float2 uv)
{
return frac(sin(dot(uv, float2(12.9898, 78.233))) * 43758.5453123);
}
float4 HBAO_frag(v2f input) : SV_Target
{
float ao = 0;
// half depth = Linear01Depth(SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, input.uv));
// half3 WorldNormal = tex2D(_CameraGBufferTexture2, input.uv).rgb * 2 - 1;
// half3 viewNormal = normalize(mul((half3x3)_WorldToViewMatrix, WorldNormal));
float3 viewPosition = FetchViewPos(input.uv);
float rnd = random(input.uv);
const int NumDirs = 4;
float delta = 2.0 * UNITY_PI / (NumDirs + 1);
const int NumSteps = 6;
float stepSize = _AORadius / abs(viewPosition.z);
// Maximum marching radius (in pixels) divided by the view-space depth; if it is below one pixel the surface is too far away and AO has almost no effect.
if (stepSize < 1.0) return 1.0;
stepSize /= NumSteps;
float InitialAngle = delta * rnd;
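// For each of the NumDirs screen-space directions (rotated by a per-pixel random offset):
// estimate the tangent with a central difference, then march NumSteps samples outward,
// accumulating only increases over the running horizon value `top`.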
UNITY_UNROLL
for (int i = 0; i < NumDirs; i++)
{
float angle = InitialAngle + delta * i;
float cos, sin;
sincos(angle, sin, cos);
float2 dir = float2(cos, sin);
float rayPixel = 1;
float top = 0;
float3 tangentVec =
FetchViewPos(input.uv + dir * _InvTextureSize) -
FetchViewPos(input.uv - dir * _InvTextureSize);
tangentVec = normalize(tangentVec);
UNITY_UNROLL
for (int j = 0; j < NumSteps; ++j)
{
float2 stepUV = rayPixel * dir * _InvTextureSize + input.uv;
float3 stepViewPos = FetchViewPos(stepUV);
ao += fullAO(viewPosition, stepViewPos, tangentVec, top);
rayPixel += stepSize;
}
}
ao /= float(NumDirs);
ao = ao * _AOStrength;
return saturate(1 - ao);
}
sampler2D _AOTex;
float4 HBAO_merge_frag(v2f input) : SV_Target
{
float ao = tex2D(_AOTex, input.uv);
float4 col = tex2D(_MainTex, input.uv);
return float4(col.rgb * ao, col.a);
}
//9x9 gaussian blur
//https://rastergrid.com/blog/2010/09/efficient-gaussian-blur-with-linear-sampling/
float4 horizontal_blur(v2f input) : SV_Target
{
const float3 offset = float3(0.0f, 1.3846153846f, 3.2307692308f);
const float3 weight = float3(0.2270270270f, 0.3162162162f, 0.0702702703f);
float4 centerColor = tex2D(_MainTex, input.uv) * weight.x;
centerColor += tex2D(_MainTex, input.uv + float2(0.0, offset.y) * _MainTex_TexelSize.xy) * weight.y;
centerColor += tex2D(_MainTex, input.uv - float2(0.0, offset.y) * _MainTex_TexelSize.xy) * weight.y;
centerColor += tex2D(_MainTex, input.uv + float2(0.0, offset.z) * _MainTex_TexelSize.xy) * weight.z;
centerColor += tex2D(_MainTex, input.uv - float2(0.0, offset.z) * _MainTex_TexelSize.xy) * weight.z;
return centerColor;
}
float4 vertical_blur(v2f input) : SV_Target
{
const float3 offset = float3(0.0f, 1.3846153846f, 3.2307692308f);
const float3 weight = float3(0.2270270270f, 0.3162162162f, 0.0702702703f);
float4 centerColor = tex2D(_MainTex, input.uv) * weight.x;
centerColor += tex2D(_MainTex, input.uv + float2(offset.y, 0.0) * _MainTex_TexelSize.xy) * weight.y;
centerColor += tex2D(_MainTex, input.uv - float2(offset.y, 0.0) * _MainTex_TexelSize.xy) * weight.y;
centerColor += tex2D(_MainTex, input.uv + float2(offset.z, 0.0) * _MainTex_TexelSize.xy) * weight.z;
centerColor += tex2D(_MainTex, input.uv - float2(offset.z, 0.0) * _MainTex_TexelSize.xy) * weight.z;
return centerColor;
}
//3x3 gaussian blur
float4 tap4blur(v2f input) : SV_Target
{
const float4 duv = _MainTex_TexelSize.xyxy * float4(0.5, 0.5, -0.5, 0);
half4 acc;
acc = tex2D(_MainTex, input.uv - duv.xy);
acc += tex2D(_MainTex, input.uv - duv.zy);
acc += tex2D(_MainTex, input.uv + duv.zy);
acc += tex2D(_MainTex, input.uv + duv.xy);
return acc * 0.25f;
}
#endif
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.Rendering;
[ExecuteAlways]
[ImageEffectAllowedInSceneView]
[RequireComponent(typeof(Camera))]
public class HBAO : MonoBehaviour
{
public enum HBAO_OutPass
{
AO,
Blurred,
Combined
}
public enum HBAO_Resolution
{
Full = 1,
Half = 2,
Quarter = 4
}
[Header("HBAO Properties")] [SerializeField]
private HBAO_Resolution resolution = HBAO_Resolution.Half;
[SerializeField] [Range(0.1f, 10.0f)] private float AOStrength;
[SerializeField] [Range(0.01f,0.5f)] private float MaxRadiusInUV;
[SerializeField] [Range(0.1f,1.0f)] private float AOBias;
private Material HBAOMaterial_ = null;
private CommandBuffer HBAOcmd_ = null;
private Camera RenderCamera_ = null;
[SerializeField] private HBAO_OutPass HBAODebug = HBAO_OutPass.Combined;
public Material HBAOMaterial
{
get
{
if (!HBAOMaterial_)
{
HBAOMaterial_ = new Material(Shader.Find("Hidden/HBAO"));
}
return HBAOMaterial_;
}
}
public Camera RenderCamera
{
get
{
if (RenderCamera_) return RenderCamera_;
RenderCamera_ = GetComponent<Camera>();
return RenderCamera_;
}
}
public CommandBuffer HBAOcmd
{
get
{
if (HBAOcmd_ != null) return HBAOcmd_;
HBAOcmd_ = new CommandBuffer();
HBAOcmd_.name = "HBAOCommandBuffer";
return HBAOcmd_;
}
}
private static class ShaderSheets
{
public static int AOStrength;
public static int AORadius;
public static int AOBias;
public static int InvTextureSize;
static ShaderSheets()
{
AOStrength = Shader.PropertyToID("_AOStrength");
AORadius = Shader.PropertyToID("_AORadius");
AOBias = Shader.PropertyToID("_AOBias");
InvTextureSize = Shader.PropertyToID("_InvTextureSize");
}
}
//basic logic
private void OnEnable()
{
RenderCamera.AddCommandBuffer(CameraEvent.BeforeImageEffectsOpaque,HBAOcmd);
}
private void OnDisable()
{
RenderCamera.RemoveCommandBuffer(CameraEvent.BeforeImageEffectsOpaque,HBAOcmd);
}
//when the camera is removed
private void OnDestroy()
{
HBAOcmd?.Dispose();
}
private void OnPreRender()
{
UpdateVariable();
RenderHBAO();
}
private void UpdateVariable()
{
HBAOMaterial.SetFloat(ShaderSheets.AOStrength,AOStrength);
HBAOMaterial.SetFloat(ShaderSheets.AOBias,AOBias);
float tanHalfFovY = Mathf.Tan(RenderCamera.fieldOfView * 0.5f * Mathf.Deg2Rad);
// _AORadius determines the maximum marching range in pixels; the on-screen range is inversely proportional to viewZ, so the farther away a surface is, the smaller the range.
float MaxRadius= MaxRadiusInUV * (RenderCamera.pixelHeight / (int)resolution) / tanHalfFovY;
HBAOMaterial.SetFloat(ShaderSheets.AORadius, MaxRadius);
// HBAOMaterial.SetMatrix("_WorldToViewMatrix", RenderCamera.worldToCameraMatrix);
}
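// Build the command buffer: copy the current target into `color`, run the AO pass
// (pass 0) at reduced resolution, blur it with the separable Gaussian (passes 2 and 3),
// then either merge it with the scene color (pass 1) or output the selected debug texture.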
private void RenderHBAO()
{
HBAOcmd.Clear();
Vector2Int wh = new Vector2Int(RenderCamera.pixelWidth, RenderCamera.pixelHeight);
HBAOMaterial.SetVector(ShaderSheets.InvTextureSize, new Vector2(
(float)resolution / wh[0],
(float)resolution / wh[1]
));
RenderTexture color = RenderTexture.GetTemporary(wh[0], wh[1]);
HBAOcmd.Blit(BuiltinRenderTextureType.CurrentActive,color);
RenderTexture tmp1 = RenderTexture.GetTemporary(wh[0]/ (int)resolution , wh[1] / (int)resolution);
HBAOcmd.Blit(color,tmp1,HBAOMaterial,0);
RenderTexture blurTex = RenderTexture.GetTemporary(wh[0] / (int)resolution, wh[1] / (int)resolution);
RenderTexture blur2Tex = RenderTexture.GetTemporary(wh[0] / (int)resolution, wh[1] / (int)resolution);
HBAOcmd.Blit(tmp1,blurTex,HBAOMaterial,2);
HBAOcmd.Blit(blurTex,blur2Tex,HBAOMaterial,3);
if (HBAODebug == HBAO_OutPass.Combined)
{
RenderTexture tmp2 = RenderTexture.GetTemporary(wh[0], wh[1]);
HBAOcmd.SetGlobalTexture("_AOTex",blur2Tex);
HBAOcmd.Blit(color,BuiltinRenderTextureType.CameraTarget,HBAOMaterial,1);
RenderTexture.ReleaseTemporary(tmp2);
}
else if(HBAODebug == HBAO_OutPass.AO)
{
HBAOcmd.Blit(tmp1, BuiltinRenderTextureType.CameraTarget);
}
else
{
HBAOcmd.Blit(blur2Tex, BuiltinRenderTextureType.CameraTarget);
}
RenderTexture.ReleaseTemporary(tmp1);
RenderTexture.ReleaseTemporary(blurTex);
RenderTexture.ReleaseTemporary(blur2Tex);
RenderTexture.ReleaseTemporary(color);
}
}
Shader "Hidden/HBAO"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
}
SubShader
{
// No culling or depth
Cull Off ZWrite Off ZTest Always
Pass
{
CGPROGRAM
#pragma enable_d3d11_debug_symbols
#pragma vertex HBAO_vert
#pragma fragment HBAO_frag
#include "HBAO.cginc"
ENDCG
}
Pass
{
CGPROGRAM
#pragma enable_d3d11_debug_symbols
#pragma vertex HBAO_vert
#pragma fragment HBAO_merge_frag
#include "HBAO.cginc"
ENDCG
}
Pass
{
CGPROGRAM
#pragma enable_d3d11_debug_symbols
#pragma vertex HBAO_vert
#pragma fragment horizontal_blur
#include "HBAO.cginc"
ENDCG
}
Pass
{
CGPROGRAM
#pragma enable_d3d11_debug_symbols
#pragma vertex HBAO_vert
#pragma fragment vertical_blur
#include "HBAO.cginc"
ENDCG
}
Pass
{
CGPROGRAM
#pragma enable_d3d11_debug_symbols
#pragma vertex HBAO_vert
#pragma fragment tap4blur
#include "HBAO.cginc"
ENDCG
}
}
}