Generating Musical Scale Waveforms

Environment

Unity2021.3.4f1

Overview

I procedurally generated the waveforms of a musical scale and played "Kaeru no Uta" (the Frog Song).

As is, the output is quite noisy; creating an AudioMixer, adding a Lowpass filter, and tuning its values reduces the noise.
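For reference, a minimal sketch (not part of the original project) of routing the AudioSource through such a mixer group at runtime; the AudioMixerGroup reference is assumed to be assigned in the Inspector, and the Lowpass effect itself is added to that group in the Audio Mixer window.

using UnityEngine;
using UnityEngine.Audio;

[RequireComponent(typeof(AudioSource))]
public class ProceduralAudioMixerSetup : MonoBehaviour
{
    // Assumed: a mixer group that already has a Lowpass effect, assigned in the Inspector.
    [SerializeField] private AudioMixerGroup _lowpassGroup = null;

    private void Awake()
    {
        // The AudioSource output (including OnAudioFilterRead results) is routed through
        // this mixer group, so the group's Lowpass filters the generated waveform.
        if (_lowpassGroup != null)
            GetComponent<AudioSource>().outputAudioMixerGroup = _lowpassGroup;
    }
}

The waveform-generating script itself is below.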

using UnityEngine;

[RequireComponent(typeof(AudioSource))]
public class ProcedualAudio : MonoBehaviour
{
    private float _outputSampleRate;
    private float _bpm = 120;   // BPM 60 = 60 quarter notes per minute

    private class NoteParam
    {
        public enum Note
        {
            C4,         // Do
            D4,         // Re
            E4,         // Mi
            F4,         // Fa
            G4,         // Sol
            A4,         // La
            B4,         // Ti
        }
        private static float[] _tones = new float[]
        {
            261.626f,
            293.665f,
            329.628f,
            349.228f,
            391.995f,
            440.000f,
            493.883f,
        };
        public Note note;
        public float tone;
        public float beat;
        public NoteParam(Note note_, float beat_)
        {
            note = note_;
            tone = _tones[(int)note];
            beat = beat_;
        }

        public float GetTime(float bpm)
        {
            return (beat * 4.0f) / (bpm / 60.0f);
        }
    }
    private NoteParam[] _notes = new NoteParam[]
    {
        new NoteParam(NoteParam.Note.C4, 1.0f / 4.0f),
        new NoteParam(NoteParam.Note.D4, 1.0f / 4.0f),
        new NoteParam(NoteParam.Note.E4, 1.0f / 4.0f),
        new NoteParam(NoteParam.Note.F4, 1.0f / 4.0f),
        new NoteParam(NoteParam.Note.E4, 1.0f / 4.0f),
        new NoteParam(NoteParam.Note.D4, 1.0f / 4.0f),
        new NoteParam(NoteParam.Note.C4, 1.0f / 2.0f),
        new NoteParam(NoteParam.Note.E4, 1.0f / 4.0f),
        new NoteParam(NoteParam.Note.F4, 1.0f / 4.0f),
        new NoteParam(NoteParam.Note.G4, 1.0f / 4.0f),
        new NoteParam(NoteParam.Note.A4, 1.0f / 4.0f),
        new NoteParam(NoteParam.Note.G4, 1.0f / 4.0f),
        new NoteParam(NoteParam.Note.F4, 1.0f / 4.0f),
        new NoteParam(NoteParam.Note.E4, 1.0f / 2.0f),
        new NoteParam(NoteParam.Note.C4, 1.0f / 2.0f),
        new NoteParam(NoteParam.Note.C4, 1.0f / 2.0f),
        new NoteParam(NoteParam.Note.C4, 1.0f / 2.0f),
        new NoteParam(NoteParam.Note.C4, 1.0f / 2.0f),
        new NoteParam(NoteParam.Note.C4, 1.0f / 8.0f),
        new NoteParam(NoteParam.Note.C4, 1.0f / 8.0f),
        new NoteParam(NoteParam.Note.D4, 1.0f / 8.0f),
        new NoteParam(NoteParam.Note.D4, 1.0f / 8.0f),
        new NoteParam(NoteParam.Note.E4, 1.0f / 8.0f),
        new NoteParam(NoteParam.Note.E4, 1.0f / 8.0f),
        new NoteParam(NoteParam.Note.F4, 1.0f / 8.0f),
        new NoteParam(NoteParam.Note.F4, 1.0f / 8.0f),
        new NoteParam(NoteParam.Note.E4, 1.0f / 4.0f),
        new NoteParam(NoteParam.Note.D4, 1.0f / 4.0f),
        new NoteParam(NoteParam.Note.C4, 1.0f / 2.0f),
    };
    private int _noteIndex = 0;
    private double _prevDspTime;

    private void Start()
    {
        _outputSampleRate = AudioSettings.outputSampleRate;
        _noteIndex = 0;
        _prevDspTime = AudioSettings.dspTime;
    }

    private void OnAudioFilterRead(float[] data, int channels)
    {
        var note = _notes[_noteIndex];
        var sec = note.GetTime(_bpm);
        var dspTime = AudioSettings.dspTime;
        var elapsedDspTime = dspTime - _prevDspTime;
        if(elapsedDspTime > sec)
        {
            _prevDspTime = dspTime;
            _noteIndex ++;
            _noteIndex %= _notes.Length;
            note = _notes[_noteIndex];
            sec = note.GetTime(_bpm);
        }
        //Debug.Log($"noteIndex:{_noteIndex} dspTime:{dspTime} sec:{sec} elapsedDspTime:{elapsedDspTime}");
        var begin = (float)(AudioSettings.dspTime % (1.0 / (double)note.tone));
        int currentSampleIndex = 0;
        var volume = Mathf.Lerp(1.0f, 0.5f,  (float)elapsedDspTime / sec);
        for (int i = 0; i< data.Length; i++)
        {
            float time = begin + (float)currentSampleIndex / _outputSampleRate;
            data[i] = volume * Mathf.Sin(2.0f * Mathf.PI * time * note.tone);
            currentSampleIndex++;
            if (channels == 2)
            {
                data[i + 1] = data[i];
                i++;
            }
        }
    }
}
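As a cross-check of the _tones table above, the same frequencies can be derived from equal temperament with A4 = 440 Hz (f = 440 * 2^((n - 69) / 12), where n is the MIDI note number); a small sketch, with names chosen only for illustration:

using UnityEngine;

public static class NoteFrequency
{
    // Equal temperament: A4 (MIDI note 69) = 440 Hz, one semitone = a factor of 2^(1/12).
    public static float FromMidiNote(int midiNote)
    {
        return 440.0f * Mathf.Pow(2.0f, (midiNote - 69) / 12.0f);
    }
}
// FromMidiNote(60) ≈ 261.626 (C4), FromMidiNote(62) ≈ 293.665 (D4), matching the table.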

Thoughts

I suppose that doing this more properly is essentially what MIDI playback amounts to.

References

音階周波数

Pythonで演奏してみた【Python】 - Fabeee Blog

Fullscreen Raymarching

Environment

Unity2021.3.4f1

Overview

I tried fullscreen raymarching.

The shape made up of these 10 metaballs and its shadow are rendered with raymarching.

The yellow floor is a Unity Plane GameObject.

Create a Quad via "3D Objects > Quad", place it under the camera in the hierarchy, and assign a material using this shader; that should be enough to get it working.
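A rough sketch of doing that setup from a script instead; the distance of 0.5 in front of the camera and the serialized material field are assumptions, not values from the original:

using UnityEngine;

public class RaymarchingQuadSetup : MonoBehaviour
{
    [SerializeField] private Material _raymarchingMaterial = null; // a material using Custom/Raymarching

    private void Start()
    {
        // Create a Quad, parent it to the main camera, and place it just in front of the near plane.
        var quad = GameObject.CreatePrimitive(PrimitiveType.Quad);
        var cameraTransform = Camera.main.transform;
        quad.transform.SetParent(cameraTransform, false);
        quad.transform.localPosition = new Vector3(0.0f, 0.0f, 0.5f); // assumed distance
        quad.transform.localRotation = Quaternion.identity;
        quad.GetComponent<MeshRenderer>().sharedMaterial = _raymarchingMaterial;
    }
}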

I also implemented raymarching in Cluster's World Craft.

cluster.mu

Cluster does not allow running your own scripts, so everything is done in the shader. I have only tried it on the desktop version, so it may not look right in VR.

Shader "Custom/Raymarching"
{
    Properties
    {
        _RayStep("Ray Step", Range(16, 128)) = 32
        _RayDistanceMin("Ray Distance Min", Range(0.001, 0.1)) = 0.1
        _SpecularColor("Specular Color", Color) = (1,1,1,1)
        _SpecularExp("SpecularExp", Range(4, 128)) = 64
    }

    CGINCLUDE
    #include "UnityCG.cginc"
    #include "Lighting.cginc"

    struct appdata
    {
        float4 vertex : POSITION;
        float3 normal : NORMAL;
        float2 uv : TEXCOORD0;
    };

    struct fout
    {
        float4 col : SV_Target;
        float depth : SV_Depth;
    };

    uint _RayStep;
    float _RayDistanceMin;
    float4 _SpecularColor;
    float _SpecularExp;

    float4x4 inverse(float4x4 m)
    {
        float4x4 cofactors = float4x4(
             determinant(float3x3(m._22_23_24, m._32_33_34, m._42_43_44)),
            -determinant(float3x3(m._21_23_24, m._31_33_34, m._41_43_44)),
             determinant(float3x3(m._21_22_24, m._31_32_34, m._41_42_44)),
            -determinant(float3x3(m._21_22_23, m._31_32_33, m._41_42_43)),

            -determinant(float3x3(m._12_13_14, m._32_33_34, m._42_43_44)),
             determinant(float3x3(m._11_13_14, m._31_33_34, m._41_43_44)),
            -determinant(float3x3(m._11_12_14, m._31_32_34, m._41_42_44)),
             determinant(float3x3(m._11_12_13, m._31_32_33, m._41_42_43)),

             determinant(float3x3(m._12_13_14, m._22_23_24, m._42_43_44)),
            -determinant(float3x3(m._11_13_14, m._21_23_24, m._41_43_44)),
             determinant(float3x3(m._11_12_14, m._21_22_24, m._41_42_44)),
            -determinant(float3x3(m._11_12_13, m._21_22_23, m._41_42_43)),

            -determinant(float3x3(m._12_13_14, m._22_23_24, m._32_33_34)),
             determinant(float3x3(m._11_13_14, m._21_23_24, m._31_33_34)),
            -determinant(float3x3(m._11_12_14, m._21_22_24, m._31_32_34)),
             determinant(float3x3(m._11_12_13, m._21_22_23, m._31_32_33))
        );
        return transpose(cofactors) / determinant(m);
    }

    inline bool IsOrtho()
    {
        return UNITY_MATRIX_P._m33 == 1.0;
    }
    inline float3 GetCameraPos()
    {
        float3 cpos = mul(inverse(UNITY_MATRIX_V), float4(0, 0, 0, 1)).xyz;
        return cpos;
    }

    inline float3 GetOrthoCameraForward(float3 posWS)
    {
        float3 viewDirWS = UNITY_MATRIX_V[2].xyz;

        float3 rayDirWS = normalize(posWS - GetCameraPos());
        if (dot(viewDirWS, rayDirWS) < 0)
            viewDirWS *= -1;

        viewDirWS *= _ProjectionParams.x;
        return viewDirWS;
    }

    float3 GetViewDirectionWS(float3 posWS)
    {
        float3 rayDirWS;
        [branch]
        if (IsOrtho())
        {
            rayDirWS = GetOrthoCameraForward(posWS);
        }
        else
        {
            rayDirWS = normalize(posWS - GetCameraPos());
        }
        return rayDirWS;
    }

    inline float GetDepthNear()
    {
#if defined(UNITY_REVERSED_Z)
        float near = 1.0;
#else
        float near = 0.0;
#endif
        return near;
    }

    inline float GetDepthFar()
    {
#if defined(UNITY_REVERSED_Z)
        float far = 0.0;
#else
        float far = 1.0;
#endif
        return far;
    }

    inline float GetProjNear()
    {
        float near = GetDepthNear();
#if defined(SHADER_API_GLCORE) || defined(SHADER_API_OPENGL) || defined(SHADER_API_GLES) || defined(SHADER_API_GLES3)
        near = near * 2.0 - 1.0;
#endif
        return near;
    }

    inline float ComputeDepth(float4 posPS)
    {
        float z = posPS.z / posPS.w;
#if defined(SHADER_API_GLCORE) || defined(SHADER_API_OPENGL) || defined(SHADER_API_GLES) || defined(SHADER_API_GLES3)
        return z * 0.5 + 0.5;
#else 
        return z;
#endif 
    }

    void VertCommon(appdata v, inout float4 o_pos, inout float3 o_posWS, inout float3 o_rayWS, inout float3 o_lightWS, inout float3 o_viewWS)
    {
        o_pos = float4(v.uv * 2.0 - 1.0, GetProjNear(), 1.0);

        float4 posWS = mul(inverse(UNITY_MATRIX_VP), o_pos);
        o_posWS = posWS.xyz / posWS.w;
        o_rayWS = GetViewDirectionWS(o_posWS);
        o_lightWS = UnityWorldSpaceLightDir(posWS);
        o_viewWS = UnityWorldSpaceViewDir(posWS);
    }

    inline float DistSphere(float3 posWS, float3 spherePos)
    {
        float radius = 0.5;
        return length(posWS - spherePos) - radius;
    }

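    // Polynomial smooth minimum (smin): blends two distances so nearby spheres merge like metaballs; k is the blend width.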
    inline float SmoothUnion(float d1, float d2, float k)
    {
        float h = clamp(0.5 + 0.5 * (d2 - d1) / k, 0.0, 1.0);
        return lerp(d2, d1, h) - k * h * (1.0 - h);
    }

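    // Scene SDF: ten spheres spread along X and animated on Z with _Time, merged with SmoothUnion.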
    float Distance(float3 posWS)
    {
        float dist = 1000.0;
        for (uint i = 0; i < 10; i++)
        {
            float d = DistSphere(posWS, float3(0.75 * i, 0.0, sin(_Time.y + 1.0 * i)));
            dist = SmoothUnion(dist, d, 0.25);
        }
        return dist;
    }

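    // Approximate the SDF normal with central differences of the distance field.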
    float3 Normal(float3 posWS)
    {
        float d = 0.0001;
        return normalize(float3(
            Distance(posWS + float3(d, 0.0, 0.0)) - Distance(posWS + float3(-d, 0.0, 0.0)),
            Distance(posWS + float3(0.0, d, 0.0)) - Distance(posWS + float3(0.0, -d, 0.0)),
            Distance(posWS + float3(0.0, 0.0, d)) - Distance(posWS + float3(0.0, 0.0, -d))
            ));
    }

    fout FragCommon(float3 i_posWS, float3 i_rayWS, float3 i_lightWS, float3 i_viewWS)
    {
        fout o;
        float3 posWS = i_posWS;
        float3 rayDirWS = normalize(i_rayWS);

        const float minDist = _RayDistanceMin;
        float dist = 1000.0;

        for (uint s = 0; s < _RayStep && dist > minDist; s++)
        {
            dist = Distance(posWS);
            posWS += rayDirWS * dist;
        }
        if (dist < minDist)
        {
#if defined(SHADOWS_DEPTH)
            o.col = 0.0;
#else
            float3 normalWS = Normal(posWS);
            float3 lightWS = normalize(i_lightWS);
            float3 viewWS = normalize(i_viewWS);
            fixed3 diffuse = _LightColor0.rgb * saturate(dot(normalWS, lightWS));
            fixed3 halfDirWS = normalize(lightWS + viewWS);
            fixed3 specular = _LightColor0.rgb * _SpecularColor.rgb * pow(saturate(dot(halfDirWS, normalWS)), _SpecularExp);
            fixed3 ambient = UNITY_LIGHTMODEL_AMBIENT.xyz;
            fixed4 finalColor = fixed4(ambient + diffuse + specular, 1.0);
            o.col = finalColor;
#endif
            float4 posPS = mul(UNITY_MATRIX_VP, float4(posWS, 1.0));
            o.depth = ComputeDepth(posPS);
        }
        else
        {
            o.col = 0.0;
            o.depth = GetDepthFar();
        }
        return o;
    }

    ENDCG

    SubShader
    {
        Tags { "Queue" = "Geometry-1"}
        Pass
        {
            Tags{"LightMode" = "ForwardBase" }
            LOD 100
            Cull Off
            Blend SrcAlpha OneMinusSrcAlpha
            Zwrite On
            ZTest LEqual

            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag

            struct v2f
            {
                float4 pos : SV_POSITION;
                float3 posWS : TEXCOORD1;
                float3 rayWS : TEXCOORD2;
                float3 lightWS : TEXCOORD3;
                float3 viewWS : TEXCOORD4;
            };

            v2f vert(appdata v)
            {
                v2f o;
                VertCommon(v, o.pos, o.posWS, o.rayWS, o.lightWS, o.viewWS);
                return o;
            }
            fout frag(v2f i)
            {
                fout o;
                o = FragCommon(i.posWS, i.rayWS, i.lightWS, i.viewWS);// , _WorldSpaceCameraPos);
                return o;
            }
            ENDCG
        }

        Pass
        {
            Name "ShadowCaster"
            Tags{"LightMode" = "ShadowCaster"}
            LOD 100
            Cull Off
            Zwrite On
            ZTest LEqual

            CGPROGRAM
            #pragma multi_compile_shadowcaster
            #pragma vertex vert
            #pragma fragment frag

            struct v2f
            {
                float4 pos : SV_POSITION;
                float3 posWS : TEXCOORD1;
                float3 rayWS : TEXCOORD2;
                float3 lightWS : TEXCOORD3;
                float3 viewWS : TEXCOORD4;
                //V2F_SHADOW_CASTER;
            };
            v2f vert(appdata v)
            {
                v2f o;
                VertCommon(v, o.pos, o.posWS, o.rayWS, o.lightWS, o.viewWS);
                //TRANSFER_SHADOW_CASTER_NORMALOFFSET(o);
                return o;
            }

            fout frag(v2f i)
            {
                fout o;
                o = FragCommon(i.posWS, i.rayWS, i.lightWS, i.viewWS);
                return o;
            }
            ENDCG
        }
    }
}

References

【Unity】レイマーチング超入門チュートリアル前編。板ポリに球体を描く【ライティングあり】 | ぐるたかログ

Parallax Occlusion Mapping

Environment

Unity2021.3.4f1

Overview

This is parallax occlusion mapping (POM).

This is a simple version that does not rewrite the depth buffer.

Shader "Custom/ParallaxOcclusionMapping"
{
    Properties
    {
        _MainTex ("MainTex", 2D) = "white" {}
        _NormalTex("NormalTex",2D) = "white"{}
        _HeightTex("DepthTex", 2D) = "white" {}
        _Height ("Height", Range(0.01, 0.3)) = 0.3
        _MinSamples ("MinSamples", int) = 16
        _MaxSamples ("MaxSamples", int) = 128
        _SpecularColor("Specular Color", Color) = (1,1,1,1)
        _SpecularExp ("SpecularExp", Range(32, 256)) = 64
    }

    CGINCLUDE
    #include "UnityCG.cginc"
    #include "Lighting.cginc"

    struct appdata
    {
        float4 position : POSITION;
        float2 texCoord : TEXCOORD0;
        float3 normal : NORMAL;
        float4 tangent : TANGENT;
    };
    struct v2f
    {
        float4 position : POSITION;
        float2 texCoord : TEXCOORD0;
        float3 lightTS : TEXCOORD1;
        float3 viewTS : TEXCOORD2;
        float2 parallaxOffsetTS : TEXCOORD3;
        float3 normalWS : TEXCOORD4;
        float3 viewWS : TEXCOORD5;
    };
    sampler2D _MainTex;
    float4 _MainTex_ST;
    sampler2D _NormalTex;
    sampler2D _HeightTex;
    float4 _HeightTex_TexelSize;
    float _Height;
    int _MinSamples;
    int _MaxSamples;
    float4 _SpecularColor;
    float _SpecularExp;

    v2f vert(appdata v)
    {
        v2f o = (v2f)0;

        o.position = UnityObjectToClipPos(v.position);
        o.texCoord = TRANSFORM_TEX(v.texCoord, _MainTex);
        o.normalWS = UnityObjectToWorldNormal(v.normal);
        o.viewWS = WorldSpaceViewDir(v.position);

        float3 lightWS = WorldSpaceLightDir(v.position);
        float3 tangentWS = UnityObjectToWorldDir(v.tangent.xyz);
        float3 binormalWS = cross(o.normalWS, tangentWS) * v.tangent.w;
        float3x3 worldToTangent = float3x3(tangentWS, binormalWS, o.normalWS);
        o.lightTS = mul(worldToTangent, lightWS);
        o.viewTS = mul(worldToTangent, o.viewWS);

        float2 parallaxDirection = normalize(o.viewTS.xy);
        float len = length(o.viewTS);
        float parallaxLength = sqrt(len * len - o.viewTS.z * o.viewTS.z) / o.viewTS.z;
        o.parallaxOffsetTS = parallaxDirection * parallaxLength;
        o.parallaxOffsetTS *= _Height;
        return o;
    }

    fixed4 frag(v2f i) : COLOR0
    {
        float3 viewTS = normalize(i.viewTS);
        float3 viewWS = normalize(i.viewWS);
        float3 lightTS = normalize(i.lightTS);
        float3 normalWS = normalize(i.normalWS);

        float2 dx, dy;
        dx = ddx(i.texCoord);
        dy = ddy(i.texCoord);

        float2 texSample = i.texCoord;
        int numSteps = (int)lerp(_MaxSamples, _MinSamples, dot(viewWS, normalWS));
        float currHeight = 0.0;
        float stepSize = 1.0 / (float)numSteps;
        float prevHeight = 1.0;
        float2 texOffsetPerStep = stepSize * i.parallaxOffsetTS;
        float2 texCurrentOffset = i.texCoord;
        float currentBound = 1.0;

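        // March along the parallax offset in texture space until the view ray falls below the height map.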
        for(int step = 0; step < numSteps && currHeight < currentBound; step++)
        {
            texCurrentOffset -= texOffsetPerStep;
            prevHeight = currHeight;
            currHeight = tex2Dgrad(_HeightTex, texCurrentOffset, dx, dy).r;
            currentBound -= stepSize;
        }
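        // Approximate the exact intersection by linearly interpolating between the last two samples.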
        float2 pt1 = float2(currentBound, currHeight);
        float2 pt2 = float2(currentBound + stepSize, prevHeight);

        float delta2 = pt2.x - pt2.y;
        float delta1 = pt1.x - pt1.y;
        float parallaxAmount = (pt1.x * delta2 - pt2.x * delta1) / (delta2 - delta1);
        float2 parallaxOffset = i.parallaxOffsetTS * (1 - parallaxAmount);
        float2 texSampleBase = i.texCoord - parallaxOffset;
        texSample = texSampleBase;

        fixed4 baseColor = tex2Dgrad(_MainTex, texSample, dx, dy);
        float3 normalTS = normalize(UnpackNormal(tex2Dgrad(_NormalTex, texSample, dx, dy)));
        fixed3 diffuse = _LightColor0.rgb * baseColor.rgb * saturate(dot(normalTS, lightTS));
        fixed3 halfDirTS = normalize(lightTS + viewTS);
        fixed3 specular = _LightColor0.rgb * _SpecularColor.rgb * pow(saturate(dot(halfDirTS, normalTS)), _SpecularExp);
        fixed3 ambient = UNITY_LIGHTMODEL_AMBIENT.xyz * baseColor.rgb;
        fixed4 finalColor = fixed4(ambient + diffuse + specular, 1.0);
        return finalColor;
    }

    ENDCG

    SubShader
    {
        Tags { "RenderType"="Opaque" }
        LOD 100
        Pass
        {
            Tags {"LightMode" = "ForwardBase"}
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            ENDCG
        }
    }
}

Thoughts

The processing cost is high, so displacement mapping is probably the better option.

References

https://advances.realtimerendering.com/s2006/Chapter5-Parallax_Occlusion_Mapping_for_detailed_surface_rendering.pdf

http://maverickproj.web.fc2.com/pg97.html

With DepthOffset

This version does rewrite the depth buffer.

Shader "Custom/ParallaxOcclusionMappingDepth"
{
    Properties
    {
        _MainTex ("MainTex", 2D) = "white" {}
        _NormalTex("NormalTex",2D) = "white"{}
        _HeightTex("DepthTex", 2D) = "white" {}
        _Height ("Height", Range(0.01, 0.3)) = 0.3
        _MinSamples ("MinSamples", int) = 16
        _MaxSamples ("MaxSamples", int) = 128
        _SpecularColor("Specular Color", Color) = (1,1,1,1)
        _SpecularExp ("SpecularExp", Range(32, 256)) = 64
        _DepthBufferScale("DepthBufferScale", Range(1.0, 5.0)) = 1.2
    }

    CGINCLUDE
    #include "UnityCG.cginc"
    #include "Lighting.cginc"

    struct appdata
    {
        float4 vertex : POSITION;
        float2 texCoord : TEXCOORD0;
        float3 normal : NORMAL;
        float4 tangent : TANGENT;
    };

    struct v2f
    {
        float4 position : POSITION;
        float2 texCoord : TEXCOORD0;
        float3 lightTS : TEXCOORD1;
        float3 viewTS : TEXCOORD2;
        float2 parallaxOffsetTS : TEXCOORD3;
        float3 normalWS : TEXCOORD4;
        float3 viewWS : TEXCOORD5;
        float3 posWS : TEXCOORD6;
    };

    struct fout
    {
        float4 col : SV_Target;
        float depth : SV_Depth;
    };

    sampler2D _MainTex;
    float4 _MainTex_ST;
    sampler2D _NormalTex;
    sampler2D _HeightTex;
    float4 _HeightTex_TexelSize;
    float _Height;
    int _MinSamples;
    int _MaxSamples;
    float4 _SpecularColor;
    float _SpecularExp;
    float _DepthBufferScale;

    v2f vert(appdata v)
    {
        v2f o = (v2f)0;

        o.position = UnityObjectToClipPos(v.vertex);
        o.texCoord = TRANSFORM_TEX(v.texCoord, _MainTex);
        o.normalWS = UnityObjectToWorldNormal(v.normal);
        o.viewWS = WorldSpaceViewDir(v.vertex);

        float3 lightWS = WorldSpaceLightDir(v.vertex);
        float3 tangentWS = UnityObjectToWorldDir(v.tangent.xyz);
        float3 binormalWS = cross(o.normalWS, tangentWS) * v.tangent.w;
        float3x3 worldToTangent = float3x3(tangentWS, binormalWS, o.normalWS);
        o.lightTS = mul(worldToTangent, lightWS);
        o.viewTS = mul(worldToTangent, o.viewWS);

        float2 parallaxDirection = normalize(o.viewTS.xy);
        float len = length(o.viewTS);
        float parallaxLength = sqrt(len * len - o.viewTS.z * o.viewTS.z) / o.viewTS.z;
        o.parallaxOffsetTS = parallaxDirection * parallaxLength;
        o.parallaxOffsetTS *= _Height;
        o.posWS = mul(UNITY_MATRIX_M, v.vertex);
        return o;
    }

    inline float ComputeDepth(float4 posPS)
    {
        float z = posPS.z / posPS.w;
#if defined(SHADER_API_GLCORE) || defined(SHADER_API_OPENGL) || defined(SHADER_API_GLES) || defined(SHADER_API_GLES3)
        return z * 0.5 + 0.5;
#else 
        return z;
#endif 
    }

    fout frag(v2f i)
    {
        fout o;
        float3 viewTS = normalize(i.viewTS);
        float3 viewWS = normalize(i.viewWS);
        float3 lightTS = normalize(i.lightTS);
        float3 normalWS = normalize(i.normalWS);

        float2 dx, dy;
        dx = ddx(i.texCoord);
        dy = ddy(i.texCoord);

        float2 texSample = i.texCoord;
        int numSteps = (int)lerp(_MaxSamples, _MinSamples, dot(viewWS, normalWS));

        float stepSize = 1.0 / (float)numSteps;
        float currHeight = 0.0;
        float prevHeight = 1.0;
        float2 texOffsetPerStep = stepSize * i.parallaxOffsetTS;
        float2 texCurrentOffset = i.texCoord;
        float currentBound = 1.0;

        for(int step = 0; step < numSteps && currHeight < currentBound; step++)
        {
            texCurrentOffset -= texOffsetPerStep;
            prevHeight = currHeight;
            currHeight = tex2Dgrad(_HeightTex, texCurrentOffset, dx, dy).r;
            currentBound -= stepSize;
        }
        float2 pt1 = float2(currentBound, currHeight);
        float2 pt2 = float2(currentBound + stepSize, prevHeight);

        float delta2 = pt2.x - pt2.y;
        float delta1 = pt1.x - pt1.y;
        float parallaxAmount = (pt1.x * delta2 - pt2.x * delta1) / (delta2 - delta1);
        float2 parallaxOffset = i.parallaxOffsetTS * (1 - parallaxAmount);
        float2 texSampleBase = i.texCoord - parallaxOffset;

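        // Push the world position away from the camera by the parallax amount (scaled by _DepthBufferScale) and output the matching depth.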
        float3 posWS = i.posWS;
        float depthScale = _DepthBufferScale;
        float parallaxLength = length(i.parallaxOffsetTS / _Height);
        depthScale += depthScale * parallaxLength;
        float3 offsetWS = -viewWS * (1 - parallaxAmount) * depthScale;
        posWS += offsetWS;
        float4 posPS = mul(UNITY_MATRIX_VP, float4(posWS, 1.0));
        o.depth = ComputeDepth(posPS);

        texSample = texSampleBase;
        fixed4 baseColor = tex2Dgrad(_MainTex, texSample, dx, dy);
        float3 normalTS = normalize(UnpackNormal(tex2Dgrad(_NormalTex, texSample, dx, dy)));
        fixed3 diffuse = _LightColor0.rgb * baseColor.rgb * saturate(dot(normalTS, lightTS));
        fixed3 halfDirTS = normalize(lightTS + viewTS);
        fixed3 specular = _LightColor0.rgb * _SpecularColor.rgb * pow(saturate(dot(halfDirTS, normalTS)), _SpecularExp);
        fixed3 ambient = UNITY_LIGHTMODEL_AMBIENT.xyz * baseColor.rgb;
        o.col = fixed4(ambient + diffuse + specular, 1.0);
        return o;
    }

    ENDCG

    SubShader
    {
        Tags { "RenderType"="Opaque" }
        LOD 100
        Pass
        {
            Tags {"LightMode" = "ForwardBase"}
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            ENDCG
        }
    }
}

Simple Watercolor Using HSV

Environment

Unity2021.2.18f1

Overview

A watercolor-like look implemented as a post effect.

The color is converted to HSV, the parameters are tweaked until it looks right, and the result is converted back to RGB.

Posterization is also applied.

The original is the scene below, with the Kohaku-chan models switched to the Standard shader.

Shader "Hidden/WaterPaint"
{
    Properties
    {
        _MainTex ("Texture", 2D) = "white" {}
        _ToneCount("Tone Count", Range(1, 20)) = 17
        _ToneScale("Tone Scale", Range(0.1, 3)) = 1.272
        _GrayMin("Gray Min", Range(0.0, 1)) = 0.913
        _SaturateScale("Saturate Scale", Range(0.1, 2.0)) = 1
        _ValueMin("Value Min", Range(0.0, 1.0)) = 0.983
            
    }
    SubShader
    {
        Cull Off ZWrite Off ZTest Always

        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag

            #include "UnityCG.cginc"

            struct appdata
            {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct v2f
            {
                float2 uv : TEXCOORD0;
                float4 vertex : SV_POSITION;
            };

            v2f vert (appdata v)
            {
                v2f o;
                o.vertex = UnityObjectToClipPos(v.vertex);
                o.uv = v.uv;
                return o;
            }

            sampler2D _MainTex;
            float _ToneCount;
            float _ToneScale;
            float _GrayMin;
            float _SaturateScale;
            float _ValueMin;

            float3 hsv2rgb(float3 c)
            {
                float4 K = float4(1.0, 2.0 / 3.0, 1.0 / 3.0, 3.0);
                float3 p = abs(frac(c.xxx + K.xyz) * 6.0 - K.www);
                return c.z * lerp(K.xxx, clamp(p - K.xxx, 0.0, 1.0), c.y);
            }
            float3 rgb2hsv(float3 c)
            {
                float4 K = float4(0.0, -1.0 / 3.0, 2.0 / 3.0, -1.0);
                float4 p = lerp(float4(c.bg, K.wz), float4(c.gb, K.xy), step(c.b, c.g));
                float4 q = lerp(float4(p.xyw, c.r), float4(c.r, p.yzx), step(p.x, c.r));

                float d = q.x - min(q.w, q.y);
                float e = 1.0e-10;
                return float3(abs(q.z + (q.w - q.y) / (6.0 * d + e)), d / (q.x + e), q.x);
            }


            fixed4 frag (v2f i) : SV_Target
            {
                fixed4 col = tex2D(_MainTex, i.uv);

                float gray = dot(col.rgb, fixed3(0.299, 0.587, 0.114));
                uint toneIndex = uint(gray * _ToneCount *_ToneScale);
                gray = float(toneIndex) / float(_ToneCount);
                gray = clamp(gray, _GrayMin, 1.0);
                float3 hsv = rgb2hsv(col.rgb);
                hsv.y = saturate(hsv.y + (1.0 - (gray * _SaturateScale)));
                hsv.z = clamp(gray, _ValueMin, 1.0);
                col.rgb = hsv2rgb(hsv);
                return col;
            }
            ENDCG
        }
    }
}
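To apply this image-effect shader, a Blit driver on the camera is needed, the same pattern as the PostDdxy script in the DDX/DDY section below; a minimal sketch with arbitrary class and field names:

using UnityEngine;

public class WaterPaintPostEffect : MonoBehaviour
{
    [SerializeField] private Material _material = null; // a material using Hidden/WaterPaint

    private void OnRenderImage(RenderTexture source, RenderTexture destination)
    {
        // Run the watercolor shader over the rendered frame.
        Graphics.Blit(source, destination, _material);
    }
}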

References

【Defold】Shader:RGBとHSVを相互に色相変換する | KAZUPON研究室

Translating, Rotating, and Scaling via UV

Environment

Unity2021.2.18f1

Overview

I tried translating, rotating, and scaling a texture via its UVs.

The time and value of each Keyframe are extracted from the AnimationCurve, binary-searched in the shader, and the linearly interpolated result is used.

using Unity.Collections;
using UnityEditor;
using UnityEngine;

public unsafe class UvTransform : MonoBehaviour
{
    [SerializeField] Material _material = null;
    // Linear interpolation only
    [SerializeField] AnimationCurve _uvOffsetXs = null;
    [SerializeField] AnimationCurve _uvOffsetYs = null;
    [SerializeField] AnimationCurve _uvRotateZs = null;
    [SerializeField] AnimationCurve _uvScaleXs = null;
    [SerializeField] AnimationCurve _uvScaleYs = null;

    private static int _spUvOffsetXs = Shader.PropertyToID("_UvOffsetXs");
    private static int _spUvOffsetYs = Shader.PropertyToID("_UvOffsetYs");
    private static int _spUvRotateZs = Shader.PropertyToID("_UvRotateZs");
    private static int _spUvScaleXs  = Shader.PropertyToID("_UvScaleXs");
    private static int _spUvScaleYs  = Shader.PropertyToID("_UvScaleYs");
    private static int _spMaxTime  = Shader.PropertyToID("_MaxTime");

    private GraphicsBuffer[] _gbUvTransforms = null;
    private float _maxTime = 0.0f;
#if UNITY_EDITOR
    private void Reset()
    {
        _uvOffsetXs.AddKey(new Keyframe(0.0f, 0.0f));
        _uvOffsetXs.AddKey(new Keyframe(1.0f, 0.0f));
        _uvOffsetYs.AddKey(new Keyframe(0.0f, 0.0f));
        _uvOffsetYs.AddKey(new Keyframe(1.0f, 0.0f));
        _uvRotateZs.AddKey(new Keyframe(0.0f, 0.0f));
        _uvRotateZs.AddKey(new Keyframe(1.0f, 0.0f));
        _uvScaleXs.AddKey(new Keyframe(0.0f, 1.0f));
        _uvScaleXs.AddKey(new Keyframe(1.0f, 1.0f));
        _uvScaleYs.AddKey(new Keyframe(0.0f, 1.0f));
        _uvScaleYs.AddKey(new Keyframe(1.0f, 1.0f));

        var animCurves = new AnimationCurve[]
        {
            _uvOffsetXs, _uvOffsetYs,
            _uvRotateZs,
            _uvScaleXs, _uvScaleYs
        };
        foreach(var animCurve in animCurves)
        {
            for (int i = 0; i < animCurve.length; i++)
            {
                AnimationUtility.SetKeyLeftTangentMode(animCurve, i, AnimationUtility.TangentMode.Linear);
                AnimationUtility.SetKeyRightTangentMode(animCurve, i, AnimationUtility.TangentMode.Linear);
            }
        }
    }

    private void OnValidate()
    {
        Setup();
    }
#endif
    private void Setup()
    {
        if(Application.isPlaying == false)
            return;
        var animCurves = new AnimationCurve[]
        {
            _uvOffsetXs, _uvOffsetYs,
            _uvRotateZs,
            _uvScaleXs, _uvScaleYs
        };
        var shaderIds = new int[]
        {
            _spUvOffsetXs, _spUvOffsetYs,
            _spUvRotateZs,
            _spUvScaleXs, _spUvScaleYs
        };
        if(_gbUvTransforms == null)
        {
            _gbUvTransforms = new GraphicsBuffer[animCurves.Length];
            for(int i = 0; i < _gbUvTransforms.Length; i++)
            {
                var animCurve = animCurves[i];
                _gbUvTransforms[i] = new GraphicsBuffer(GraphicsBuffer.Target.Structured, animCurve.length, sizeof(Vector2));
            }
        }
        _maxTime = 0.0f;
        for(int i = 0; i < _gbUvTransforms.Length; i++)
        {
            var animCurve = animCurves[i];
            var keyValues = new NativeArray<Vector2>(animCurve.length, Allocator.Temp);
            for(int j = 0; j < animCurve.length; j++)
            {
                var keyframe = animCurve.keys[j];
                keyValues[j] = new Vector2(keyframe.time, keyframe.value);
            }
            if(_maxTime < keyValues[animCurve.length - 1].x)
                _maxTime = keyValues[animCurve.length - 1].x;
            _gbUvTransforms[i].SetData(keyValues);
            _material.SetBuffer(shaderIds[i], _gbUvTransforms[i]);
        }
        _material.SetFloat(_spMaxTime, _maxTime);
    }

    private void Start()
    {
        Setup();
    }

    private void OnDestroy()
    {
        if(_gbUvTransforms != null)
        {
            foreach(var gb in _gbUvTransforms)
            {
                gb.Dispose();
            }
            _gbUvTransforms = null;
        }
    }
}
Shader "Custom/UvTransform"
{
    Properties
    {
        _MainTex ("Texture", 2D) = "white" {}
    }

    CGINCLUDE
    #pragma target 4.5
    #include "UnityCG.cginc"

    struct appdata
    {
        float4 vertex : POSITION;
        float2 uv : TEXCOORD0;
    };

    struct v2f
    {
        float2 uv : TEXCOORD0;
        float4 vertex : SV_POSITION;
    };

    sampler2D _MainTex;
    float4 _MainTex_ST;
    StructuredBuffer<float2> _UvOffsetXs;
    StructuredBuffer<float2> _UvOffsetYs;
    StructuredBuffer<float2> _UvRotateZs;
    StructuredBuffer<float2> _UvScaleXs;
    StructuredBuffer<float2> _UvScaleYs;
    float _MaxTime;

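    // Binary search the (time, value) keyframes, which are sorted by time, then linearly interpolate between the two neighboring keys.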
    float SearchValue(float time, StructuredBuffer<float2> keyValues)
    {
        uint len;
        uint stride;
        keyValues.GetDimensions(len, stride);

        uint min = 0;
        uint max = len - 1;

        if (time <= keyValues[min].x)
            return keyValues[min].y;
        if (time >= keyValues[max].x)
            return keyValues[max].y;

        while ((max - min) > 1)
        {
            uint index = (min + max) / 2;
            if (keyValues[index].x < time)
            {
                min = index;
            }
            else //if (time < anims[index].x)
            {
                max = index;
            }
        }
        float r = (time - keyValues[min].x) / (keyValues[max].x - keyValues[min].x);
        return lerp(keyValues[min].y, keyValues[max].y, r);
    }

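    // Rotate and scale the UV around the pivot (0.5, 0.5), apply the animated translation, then the material's tiling and offset.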
    float2 UvTransform(float2 tiling, float2 offset, float2 texcoord, float2 pos, float2 scale, float rotateZ)
    {
        float2 pivot = float2(0.5, 0.5);
        float cosAngle, sinAngle;

        sincos(rotateZ, sinAngle, cosAngle);
        float2 s = ((tiling - 1.0) + scale);
        float2x2 rotMatrix = float2x2(float2(cosAngle, -sinAngle), float2(sinAngle, cosAngle));
        float2 uv = texcoord - pivot;
        uv = mul(rotMatrix, uv) * s + pivot + -pos;
        uv += pivot * tiling + offset - pivot;
        return uv;
    }

    v2f vert(appdata v)
    {
        v2f o;
        o.vertex = UnityObjectToClipPos(v.vertex);

        float time = fmod(_Time.y, _MaxTime);
        float ox = SearchValue(time, _UvOffsetXs);
        float oy = SearchValue(time, _UvOffsetYs);
        float rz = SearchValue(time, _UvRotateZs);
        float sx = SearchValue(time, _UvScaleXs);
        float sy = SearchValue(time, _UvScaleYs);

        o.uv = UvTransform(_MainTex_ST.xy, _MainTex_ST.zw, v.uv, float2(ox, oy), float2(sx, sy), rz);
        return o;
    }

    fixed4 frag(v2f i) : SV_Target
    {
        fixed4 col = tex2D(_MainTex, i.uv);
        return col;
    }
    ENDCG

    SubShader
    {
        Tags { "RenderType"="Opaque" }
        LOD 100

        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            ENDCG
        }
    }
}

Occlusion / Visibility Test

Environment

Unity2021.2.18f1

Overview

Glare driven by an occlusion (or, conversely, visibility) test.

I tried doing something like an occlusion query using _CameraDepthTexture and a compute shader.

It is a classic technique, but I think it is useful when you are already doing bloom with a Gaussian blur and also want something like light streaks on top.

using System.Collections;
using System.Collections.Generic;
using Unity.Collections;
using UnityEngine;
using UnityEngine.Rendering;

public unsafe class OcclusionTestGlare : MonoBehaviour
{
    [SerializeField] private GameObject[] _pointObjs = null;
    [SerializeField] private ComputeShader _computeShader = null;
    [SerializeField] private Material _matGlare = null;
    private const float _radius = 0.25f;
    private CommandBuffer _cbOcclusionTest = null;
    private CommandBuffer _cbGlare = null;
    private struct Occlusion
    {
        public Vector3 pos;
        public Vector3 screenPos;
        public float alpha;
    }
    private NativeArray<Occlusion> _occlusions;
    private GraphicsBuffer _gbOcclusions = null;
    private Camera _camera = null;
    private int _kernelIndex = -1;

    private static int _spRadius = Shader.PropertyToID("_Radius");
    private static int _spOcclusions = Shader.PropertyToID("_Occlusions");
    private static int _spViewProjMatrix = Shader.PropertyToID("_ViewProjMatrix");
    private static int _spCameraDepthTexture = Shader.PropertyToID("_CameraDepthTexture");

    private void OnDrawGizmos()
    {
        Gizmos.color = Color.yellow;
        for(int i = 0; i < _pointObjs.Length; i++)
        {
            Gizmos.DrawSphere(_pointObjs[i].transform.position, _radius);
        }
    }

    private void Setup()
    {
        if(_camera != null)
            return;

        _camera = Camera.main;
        _camera.depthTextureMode |= DepthTextureMode.Depth;
        _occlusions = new NativeArray<Occlusion>(_pointObjs.Length, Allocator.Persistent);
        for(int i = 0; i < _occlusions.Length; i++)
            _occlusions[i] = new Occlusion(){pos = _pointObjs[i].transform.position, alpha = 0.0f};
        _gbOcclusions = new GraphicsBuffer(GraphicsBuffer.Target.Structured, _pointObjs.Length, sizeof(Occlusion));
        _gbOcclusions.SetData(_occlusions);

        _kernelIndex = _computeShader.FindKernel("OcclusionTest");
        _computeShader.SetFloat(_spRadius, _radius);
        _computeShader.SetBuffer(_kernelIndex, _spOcclusions, _gbOcclusions);

        _cbOcclusionTest = new CommandBuffer();
        _cbOcclusionTest.name = "OcclusionTest";
        _cbOcclusionTest.DispatchCompute(_computeShader, _kernelIndex, _occlusions.Length, 1, 1);
        _camera.AddCommandBuffer(CameraEvent.AfterForwardOpaque, _cbOcclusionTest);


        _matGlare.SetBuffer(_spOcclusions, _gbOcclusions);

        _cbGlare = new CommandBuffer();
        _cbGlare.DrawProcedural(Matrix4x4.identity, _matGlare, 0, MeshTopology.Quads, _gbOcclusions.count * 4);
        _camera.AddCommandBuffer(CameraEvent.AfterImageEffects, _cbGlare);
    }

    private void OnDestroy()
    {
        if(_cbOcclusionTest != null)
            _cbOcclusionTest.Dispose();
        if(_cbGlare != null)
            _cbGlare.Dispose();
        if(_occlusions.IsCreated == true)
            _occlusions.Dispose();
        if(_gbOcclusions != null)
            _gbOcclusions.Dispose();
    }

    private void LateUpdate()
    {
        var texture = Shader.GetGlobalTexture(_spCameraDepthTexture) as RenderTexture;
        if(texture == null)
            return;

        Setup();
        var projMatrix = _camera.projectionMatrix;
        var viewMatrix = _camera.worldToCameraMatrix;
        var viewProjMatrix = projMatrix * viewMatrix;

        //var zb = Shader.GetGlobalVector("_ZBufferParams");

        _computeShader.SetMatrix(_spViewProjMatrix, viewProjMatrix);
    }
}
#pragma kernel OcclusionTest

float _Radius;
struct Occlusion
{
    float3 pos;
    float3 screenPos;
    float alpha;
};

Texture2D<half> _CameraDepthTexture;
float4 _ZBufferParams;
float4 _ProjectionParams;
float4 _ScreenParams;
RWStructuredBuffer<Occlusion> _Occlusions;
float4x4 _ViewProjMatrix;

float3 GetScreenPos(float3 pos)
{
    float4 screenPos = mul(_ViewProjMatrix, float4(pos.x, pos.y, pos.z, 1.0f));
    screenPos.xy = screenPos.xy / screenPos.w;
    screenPos.xy = (screenPos.xy + 1.0) * 0.5 * _ScreenParams.xy;
    screenPos.z = screenPos.w * _ProjectionParams.w;
    return screenPos.xyz;
}

inline float Linear01Depth(float z)
{
    return 1.0 / (_ZBufferParams.x * z + _ZBufferParams.y);
}

[numthreads(1, 1, 1)]
void OcclusionTest(uint3 id : SV_DispatchThreadID)
{
    Occlusion occ = _Occlusions[id.x];
#if 0
    float3 offset = float3(_Radius, _Radius, 0);
    float3 sp0 = GetScreenPos(occ.pos - offset);
    float3 sp1 = GetScreenPos(occ.pos + offset);
    uint test = 0;

    uint height = sp1.y - sp0.y;
    uint width = sp1.x - sp0.x;
    for (uint y = (uint)sp0.y; y <= (uint)sp1.y; y++)
    {
        if(y < 0 || y > _ScreenParams.y - 1)
            continue;
        for(uint x = (uint)sp0.x; x <= (uint)sp1.x; x++)
        {
            if (x < 0 || x > _ScreenParams.x - 1)
                continue;
            float depth = Linear01Depth(_CameraDepthTexture[float2(x,y)].r);
            test += (sp0.z < depth) ? 1 : 0;
        }
    }
    occ.alpha = (float)test / (height * width);
    occ.screenPos = GetScreenPos(occ.pos);
#else
#if 1
#define PI 3.14159265359
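    // Sample a roughly disc-shaped pattern around the sphere center, project each sample to screen space,
    // and count how many lie in front of the scene depth; alpha becomes the visible fraction.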
    float size = _Radius * 2.0;
    float div = 16;
    float step = size / div;
    float stepRad = PI / (div + 2);
    uint test = 0;
    uint count = 0;
    for (float j = 0; j <= div; j++)
    {
        float h = j * step;
        float width = size * sin(stepRad * (j + 1));
        float ow = (size - width) * 0.5;
        for (float w = ow; w <= width; w += step, count++)
        {
            float3 offset = float3(-_Radius + w, -_Radius + h, 0);
            float3 sp = GetScreenPos(occ.pos + offset);
            if (sp.x < 0 || sp.x > _ScreenParams.x - 1)
                continue;
            if (sp.y < 0 || sp.y > _ScreenParams.y - 1)
                continue;
            float depth = Linear01Depth(_CameraDepthTexture[sp.xy].r);
            test += (sp.z < depth) ? 1 : 0;
        }
    }
    occ.alpha = (float)test / count;
    occ.screenPos = GetScreenPos(occ.pos);
#else
    float size = _Radius * 2.0;
    float div = 16;
    float step = size / div;
    uint test = 0;
    uint count = 0;
    for (float h = 0; h <= size; h += step)
    {
        for (float w = 0; w <= size; w += step)
        {
            float3 offset = float3(-_Radius + w, -_Radius + h, 0);
            if(_Radius < length(offset))
                continue;
            count++;
            float3 sp = GetScreenPos(occ.pos + offset);
            if (sp.x < 0 || sp.x > _ScreenParams.x - 1)
                continue;
            if (sp.y < 0 || sp.y > _ScreenParams.y - 1)
                continue;
            float depth = Linear01Depth(_CameraDepthTexture[sp.xy].r);
            test += (sp.z < depth) ? 1 : 0;
        }
    }
    occ.alpha = (float)test / count;
    occ.screenPos = GetScreenPos(occ.pos);
#endif
#endif
    _Occlusions[id.x] = occ;
}
Shader "Unlit/OcclusionTestGlare"
{
    Properties
    {
        _MainTex ("Texture", 2D) = "white" {}
        _Size("Glare Size", float) = 0.2
    }

    CGINCLUDE
    #include "UnityCG.cginc"

    struct Occlusion
    {
        float3 pos;
        float3 screenPos;
        float alpha;
    };
    StructuredBuffer<Occlusion> _Occlusions;

    struct appdata
    {
        uint vertexId : SV_VertexID;
    };

    struct v2f
    {
        float4 vertex : SV_POSITION;
        float2 uv : TEXCOORD0;
        float alpha : TEXCOORD1;
    };

    float _Size;

    v2f vert (appdata v)
    {
        v2f o;

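        // Drawn with MeshTopology.Quads: 4 vertices per glare sprite, so vertexId encodes both the quad corner and the occlusion index.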
        uint vertexIndex = v.vertexId % 4;
        float aspect = (float)_ScreenParams.y / (float)_ScreenParams.x;
        float3 quad[4] = 
        {
            float3(-1, -1, 0),
            float3( 1, -1, 0),
            float3( 1,  1, 0),
            float3(-1,  1, 0),
        };
        float3 pos = quad[vertexIndex] * float3(aspect, 1.0, 1.0) * _Size;

        uint occlusionIndex = v.vertexId / 4;
        Occlusion occ = _Occlusions[occlusionIndex];
        float2 screenPos = occ.screenPos;
        pos.xy += (screenPos / _ScreenParams.xy) * 2.0 - 1.0;
        o.vertex = float4(pos, 1.0);
        o.uv = quad[vertexIndex].xy * 0.5 + 0.5;
        o.alpha = occ.alpha;
        return o;
    }

    sampler2D _MainTex;

    fixed4 frag (v2f i) : SV_Target
    {
        fixed4 col = tex2D(_MainTex, i.uv) * i.alpha;
        return col;
    }

    ENDCG

    SubShader
    {
        Blend SrcAlpha One
        Cull Off ZWrite Off ZTest Always

        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            ENDCG
        }
    }
}

DDX/DDY

Environment

Unity2021.2.18f1

Overview

ddx and ddy are instructions that obtain gradients (screen-space partial derivatives).

They return the difference between the value of the variable you pass in and its value in the neighboring pixel (strictly speaking, not exactly the neighbor).

The shader below takes the derivatives of the depth and the normal, detects edges from them, and applies an AA-like treatment (it simply blurs) there.

Shader "Hidden/PostDdxy"
{
    Properties
    {
        _MainTex ("Texture", 2D) = "white" {}
        _EdgeNormal("Judge Edge Normal", float) = 0.99
        _EdgeDepth("Judge Edge Depth", float) = 0.001
    }
    SubShader
    {
        Cull Off ZWrite Off ZTest Always

        Pass
        {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag

            #include "UnityCG.cginc"

            struct appdata
            {
                float4 vertex : POSITION;
                float2 uv : TEXCOORD0;
            };

            struct v2f
            {
                float4 vertex : SV_POSITION;
                float2 uv : TEXCOORD0;
            };

            v2f vert (appdata v)
            {
                v2f o;
                o.vertex = UnityObjectToClipPos(v.vertex);
                o.uv = v.uv;
                return o;
            }

            sampler2D _MainTex;
            sampler2D _CameraDepthTexture;
            sampler2D _CameraDepthNormalsTexture;
            float _EdgeNormal;
            float _EdgeDepth;

            fixed3 antiAlias(v2f i, float3 rgb)
            {
                // _ScreenParams.zw = 1 + 1/size, so z - 1 and w - 1 give a one-pixel offset in UV space.
                float2 offset = float2(_ScreenParams.z - 1.0, _ScreenParams.w - 1.0);
#if 0
                float3 u = tex2D(_MainTex, i.uv + float2(0, offset.y)).rgb;
                float3 b = tex2D(_MainTex, i.uv + float2(0, -offset.y)).rgb;
                float3 l = tex2D(_MainTex, i.uv + float2(-offset.x, 0)).rgb;
                float3 r = tex2D(_MainTex, i.uv + float2(offset.x, 0)).rgb;
                rgb = (rgb + u + b + l + r) / 5.0;
#else
                float3 rgb0 = tex2D(_MainTex, i.uv + float2( 0.0, offset.y)).rgb;
                float3 rgb1 = tex2D(_MainTex, i.uv + float2( 0.87 * offset.x, -0.50 * offset.y)).rgb;
                float3 rgb2 = tex2D(_MainTex, i.uv + float2(-0.87 * offset.x, -0.50 * offset.y)).rgb;
                rgb = (rgb + rgb0 + rgb1 + rgb2) / 4.0;
#endif
                return rgb;
            }

            bool IsEdgeNormal(float2 uv)
            {
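                // Compare the normal with a version nudged by its ddx/ddy; a small dot product means the normal changes sharply (an edge).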
                float4 depthNormal = tex2D(_CameraDepthNormalsTexture, uv);
                float tempDepth;
                float3 normal;
                DecodeDepthNormal(depthNormal, tempDepth, normal);
                float3 diffNormalX = ddx(normal);
                float3 diffNormalY = ddy(normal);
                float3 normal2 = normal + (0 + diffNormalX + diffNormalY) / 3.0;
                float edgeNormal = dot(normal, normal2);
                return (edgeNormal < _EdgeNormal);
            }

            bool IsEdgeDepth(float2 uv)
            {
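                // A large screen-space derivative of the linear depth indicates a depth discontinuity (a silhouette edge).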
                float depth = LinearEyeDepth(tex2D(_CameraDepthTexture, uv).r) * _ProjectionParams.w;
                float edgeDepth = abs(ddx(depth)) + abs(ddy(depth));
                return (edgeDepth > _EdgeDepth);
            }

            bool IsEdge(float2 uv)
            {
                return IsEdgeDepth(uv) || IsEdgeNormal(uv);
            }

            fixed4 frag(v2f i) : SV_Target
            {
                float2 offset = float2(_ScreenParams.z - 1.0, _ScreenParams.w - 1.0);
                float4 col = tex2D(_MainTex, i.uv);
                if (IsEdge(i.uv) || IsEdge(i.uv + offset))
                    col.rgb = antiAlias(i, col.rgb);
                //else
                //  col.rgb = 0.0;
                return col;
            }
            ENDCG
        }
    }
}
using UnityEngine;

public class PostDdxy : MonoBehaviour
{
    [SerializeField] private Material _material = null;

    private void Start()
    {
        Camera.main.depthTextureMode |= DepthTextureMode.DepthNormals;
    }
    private void OnRenderImage(RenderTexture source, RenderTexture destination)
    {
        Graphics.Blit(source, destination, _material);
    }
}

References

ddx, ddyの挙動を調べてみた - もんしょの巣穴ブログ Ver2.0