Dynamically recalculating normals after vertex displacement - unity3d

Can anyone let me know if I'm on the right track with this: I have a vertex shader that bumps outward dynamically depending on a point passed in (think a mouse running under a rug). In order for the lighting to update properly, I need to recalculate the normals after modifying the vertex position. I have access to each vertex point as well as the origin.
My current thinking is I do some sort of math to determine the tangent / bitangent and use a cross product to determine the normal. My math skills aren't great, what would I need to do to determine those vectors?
Here's my current vert shader:
// Vertex modifier: lifts vertices upward based on their world-space
// distance to _LiftOrigin (the "mouse running under a rug" effect).
void vert(inout appdata_full v)
{
// Object-space position transformed to world space for the distance test.
float3 worldPos = mul(unity_ObjectToWorld, v.vertex).xyz;
float distanceToLift = distance(worldPos, _LiftOrigin);
// Closer vertices get a larger upward offset; smoothstep fades the lift
// out to zero as the scaled distance approaches _LiftHeight.
// NOTE(review): the normal is not recalculated here, so lighting will not
// match the displaced surface — which is exactly the question being asked.
v.vertex.y = smoothstep(_LiftHeight, 0, distanceToLift / _LiftRadius) * 5;
}

A simple solution is covered in this tutorial by Ronja, which I'll summarize here with modifications which reflect your specific case.
First, find two points offset from your current point by a small amount of tangent and bitangent (which you can calculate from normal and tangent):
// Re-evaluate the lift at a point nudged slightly along the tangent so the
// displaced surface's tangent vector can be reconstructed afterwards.
float3 posPlusTangent = v.vertex + v.tangent * 0.01;
worldPos = mul(unity_ObjectToWorld, posPlusTangent).xyz;
distanceToLift = distance(worldPos, _LiftOrigin);
posPlusTangent.y = smoothstep(_LiftHeight, 0, distanceToLift / _LiftRadius) * 5;
// The bitangent spans the surface together with the tangent.
float3 bitangent = cross(v.normal, v.tangent);
float3 posPlusBitangent = v.vertex + bitangent * 0.01;
// FIX: was mul(unity_ObjectToWorld, bitangent) — that transformed the
// bitangent *direction* instead of the nudged *position*, so the second
// displacement sample was taken at the wrong place.
worldPos = mul(unity_ObjectToWorld, posPlusBitangent).xyz;
distanceToLift = distance(worldPos, _LiftOrigin);
posPlusBitangent.y = smoothstep(_LiftHeight, 0, distanceToLift / _LiftRadius) * 5;
Then, find the difference between these offsets and the new vertex pos to find the new tangent and bitangent, then do another cross product to find the resulting normal:
// Differences from the displaced vertex give the new tangent frame...
float3 modifiedTangent = posPlusTangent - v.vertex;
float3 modifiedBitangent = posPlusBitangent - v.vertex;
// ...and their cross product approximates the displaced surface normal.
float3 modifiedNormal = cross(modifiedTangent, modifiedBitangent);
v.normal = normalize(modifiedNormal);
Altogether:
// Returns the vertical lift for a point given in object space; the lift
// falls off smoothly with world-space distance from _LiftOrigin.
float find_offset(float3 localV)
{
    // FIX: promote to a point (w = 1) so the matrix's translation column is
    // applied — mul(float4x4, float3) implicitly truncates and drops it,
    // which makes the world-space distance wrong for any moved object.
    float3 worldPos = mul(unity_ObjectToWorld, float4(localV, 1.0)).xyz;
    float distanceToLift = distance(worldPos, _LiftOrigin);
    return smoothstep(_LiftHeight, 0, distanceToLift / _LiftRadius) * 5;
}
// Vertex modifier: displaces the vertex, then approximates the displaced
// surface normal by displacing two neighbouring sample points and taking
// the cross product of the resulting tangent/bitangent.
void vert(inout appdata_full v)
{
    // Displace the vertex itself.
    v.vertex.y = find_offset(v.vertex.xyz);
    // Sample the displacement a small step along the tangent
    // (v.tangent is a float4; only its direction xyz is needed here).
    float3 posPlusTangent = v.vertex.xyz + v.tangent.xyz * 0.01;
    posPlusTangent.y = find_offset(posPlusTangent);
    // ...and a small step along the bitangent (normal x tangent).
    float3 bitangent = cross(v.normal, v.tangent.xyz);
    float3 posPlusBitangent = v.vertex.xyz + bitangent * 0.01;
    // FIX: was "posPlusTangent.y = find_offset(posPlusBitangent);" which
    // overwrote the tangent sample and left posPlusBitangent undisplaced,
    // breaking the normal reconstruction below.
    posPlusBitangent.y = find_offset(posPlusBitangent);
    // New tangent frame after displacement; its cross product approximates
    // the displaced surface normal.
    float3 modifiedTangent = posPlusTangent - v.vertex.xyz;
    float3 modifiedBitangent = posPlusBitangent - v.vertex.xyz;
    float3 modifiedNormal = cross(modifiedTangent, modifiedBitangent);
    v.normal = normalize(modifiedNormal);
}
This is a method of approximation, but it may be good enough!

Related

How to design a shader in Unity so it uses directions from the camera

I have found a few unique shaders from Shadertoy which I would like to impart an experience of 'sky' within models of buildings in Unity AR.
An example might be this one: https://www.shadertoy.com/view/4tdSWr which is just the sky looking up, or this one, https://www.shadertoy.com/view/4tdSWr which has some directional input from the mouse (click and drag) — note both links above are identical; the second appears to be a copy-paste error — the HLSL/unity version of this code except for changes to mouse input is at the end of this post.
Right now the clouds feel more like a green screen projection on the model, so there is no implication of direction or horizon if you are looking parallel to the plane the building is on. (ie, if I am standing with the clouds moving from right to left, as I turn left they don't appear to be moving from behind me and receding into the distance)
I have been trying to understand how to use the camera to 'rotate' the shader result so the direction of the camera is used to ensure the direction of the clouds movement. I would also like to use the angle of the camera with respect to the ground plane to impart a horizon when you are looking out towards the walls.
Any insight on how to do this would be great, especially if it is more than just a 'use _WorldSpaceCameraPos' or 'just add UNITY_MATRIX_MVP' as the results I found through excessive googling haven't really been that helpful so far.
The code for the second shader linked, adjusted for HLSL/Unity except for the mouse inputs is at the end of this post.
Shader "Unlit/skybox"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
iChannel0 ("noise-image", 2D) = "noise-image.png" {}
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 100
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
// make fog work
#pragma multi_compile_fog
#include "UnityCG.cginc"
// Vertex input: object-space position and the first UV channel.
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
// Vertex-to-fragment payload: tiled UVs, fog factor, clip-space position.
struct v2f
{
float2 uv : TEXCOORD0;
UNITY_FOG_COORDS(1)
float4 vertex : SV_POSITION;
};
sampler2D _MainTex;
sampler2D iChannel0;
float4 _MainTex_ST;
// Pseudo-random lookup from the red channel of the noise texture.
// Assumes iChannel0 repeats with a 64-texel period (uv is in lattice
// units, divided by 64 to get texture coordinates) — TODO confirm the
// bound texture's size/wrap mode matches.
float random(in float2 uv)
{
return tex2D(iChannel0, uv / 64.).r;
}
// Value noise: bilinearly interpolates random values at the four integer
// lattice corners surrounding uv, using a smoothstep-shaped weight.
float noise(in float2 uv)
{
float2 cell = floor(uv);
float2 t = frac(uv);
// Hermite weight 3t^2 - 2t^3 smooths the interpolation.
t = t * t * (3. - 2. * t);
float c00 = random(cell + float2(0., 0.));
float c10 = random(cell + float2(1., 0.));
float c01 = random(cell + float2(0., 1.));
float c11 = random(cell + float2(1., 1.));
float bottom = lerp(c00, c10, t.x);
float top = lerp(c01, c11, t.x);
return lerp(bottom, top, t.y);
}
#define OCTAVES 8
// Fractal Brownian motion: sums OCTAVES layers of value noise, doubling
// the frequency and halving the amplitude each octave.
float fbm(in float2 uv)
{
float total = 0.;
float gain = .5;
for (int octave = 0; octave < OCTAVES; octave++)
{
total += noise(uv) * gain;
gain *= .5;
uv *= 2.;
}
return total;
}
// Procedural sky color for a ray (origin ro, direction rd):
// gradient sky + sun glow + fbm clouds on a distant plane + horizon haze.
float3 Sky(in float3 ro, in float3 rd)
{
const float SC = 1e5;
// Calculate sky plane
// Intersect the ray with a horizontal plane at height SC; the XZ hit
// position (scaled) becomes the cloud-noise coordinate.
float dist = (SC - ro.y) / rd.y;
float2 p = (ro + dist * rd).xz;
p *= 1.2 / SC;
// from iq's shader, https://www.shadertoy.com/view/MdX3Rr
float3 lightDir = normalize(float3(-.8, .15, -.3));
float sundot = clamp(dot(rd, lightDir), 0.0, 1.0);
float3 cloudCol = float3(1.,1.0,1.0);
//float3 skyCol = float3(.6, .71, .85) - rd.y * .2 * float3(1., .5, 1.) + .15 * .5;
// Blue gradient, darkening with elevation; fades to grey near the horizon.
float3 skyCol = float3(0.3,0.5,0.85) - rd.y*rd.y*0.5;
skyCol = lerp( skyCol, mul(0.85, float3(0.7,0.75,0.85)), pow( 1.0 - max(rd.y, 0.0), 4.0 ) );
// sun
// Three powers of sundot: broad halo, tighter glow, hard bright core.
float3 sun = mul(mul(0.25 , float3(1.0,0.7,0.4)) , pow( sundot,5.0 ));
sun += mul(mul(0.25 , float3(1.0,0.8,0.6)) , pow( sundot,64.0 ));
sun += mul(mul(0.2 , float3(1.0,0.8,0.6)) , pow( sundot,512.0 ));
skyCol += sun;
// clouds
// Scroll the noise domain with time to animate cloud drift.
float t = mul(_Time.y , 0.1);
float den = fbm(float2(p.x - t, p.y - t));
skyCol = lerp( skyCol, cloudCol, smoothstep(.4, .8, den));
// horizon
skyCol = lerp( skyCol, mul(0.68 , float3(.418, .394, .372)), pow( 1.0 - max(rd.y, 0.0), 16.0 ) );
return skyCol;
}
// Builds a look-at camera basis (right, up, forward) for a camera at ro
// looking toward ta, rolled by cr radians about the view axis.
float3x3 setCamera( in float3 ro, in float3 ta, float cr )
{
float3 forward = normalize(ta - ro);
float3 rollAxis = float3(sin(cr), cos(cr), 0.0);
float3 right = normalize(cross(forward, rollAxis));
float3 up = normalize(cross(right, forward));
return float3x3(right, up, forward);
}
// Hand-ported Shadertoy entry point. NOTE(review): nothing in this pass
// calls mainImage — frag() below only samples _MainTex — and iMouse is
// not declared anywhere in this program, so this function is dead code
// and would fail to compile if it were referenced.
void mainImage( out float4 fragColor, in float2 fragCoord )
{
// Normalized, centered, aspect-corrected screen coordinates.
float2 uv = fragCoord.xy / _ScreenParams.xy;
uv -= 0.5;
uv.x *= _ScreenParams.x / _ScreenParams.y;
float2 mouse = iMouse.xy/_ScreenParams.xy;
// Camera at the origin; look-at target orbits with the mouse position.
float3 ro = float3(0.0, 0.0, 0.0);
float3 ta = float3(cos(mul(mouse.x , 6.28)), mul(mouse.y , 2.0), sin(mul(mouse.x , 6.28)));
float3x3 cam = setCamera(ro, ta, 0.0);
// Per-pixel ray direction through the camera basis.
float3 rd = normalize(mul(cam , float3(uv, 1.0)));
float3 col = Sky(ro, rd);
fragColor = float4(float3(col),1.0);
}
// Standard unlit vertex shader: clip-space transform, _MainTex tiling,
// and fog coordinate transfer.
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
UNITY_TRANSFER_FOG(o,o.vertex);
return o;
}
// NOTE(review): this is still the unmodified template fragment shader —
// it only samples _MainTex and never invokes Sky()/mainImage() above,
// which is why the procedural sky does not appear in the output.
fixed4 frag (v2f i) : SV_Target
{
// sample the texture
fixed4 col = tex2D(_MainTex, i.uv);
// apply fog
UNITY_APPLY_FOG(i.fogCoord, col);
return col;
}
ENDCG
}
}
}

How render 3D render texture in a Unity3D custom shader

Hi is it possible to render a 3d render texture on a custom shader raymarching like a 3dtexture?
I use a 3D render texture because I calculate and set the color of the 3D volume in a compute shader. I set the rendertexture 3D as shown below:
// Create a 3D RenderTexture the compute shader can write into and a
// sampler3D can later read from.
output3DRenderTexture= new RenderTexture(m_CubeDim.x, m_CubeDim.y, 0, thisTexFormat);
// FIX: was the garbled identifier "outpuoutput3DRenderTextureRendTex",
// which does not exist — random write must be enabled on the same
// texture, before Create() is called.
output3DRenderTexture.enableRandomWrite = true;
output3DRenderTexture.dimension = UnityEngine.Rendering.TextureDimension.Tex3D;
output3DRenderTexture.volumeDepth = m_CubeDim.z;
output3DRenderTexture.Create();
I populate the 3D RenderTexture data in a a compute shader and GetData helps me confirm the 3d render texture has all the correct color data.
I can successfully render if I replace the 3D renderTex in the custom shader's sampler3D for a 3DTexture I create with the Tex2D slices.
cubeRenderer.material.SetTexture("_MainTex", output3DRenderTexture);//this does not render
versus
cubeRenderer.material.SetTexture("_MainTex", outputTexture3D);//this renders
This post in 2016 seems to suggest it's possible to render 3d render textures in custom shaders but it may now be outdated, it doesn't work for me and no error shows either.
It seems to me there maybe a significant performance hit if create the Textures2D slices in GPU, carry on with the creation of the Texture3D on CPU and re-send this Tex3D to GPU for the custom shader to consume it. After all the 3D volume already existed in GPU except as RenderTexture set as Tex3D. Thank you!
Shader:
#include "UnityCG.cginc"
#define ITERATIONS 100
#define PI2 6.28318530718
half4 _Color;
sampler3D _MainTex;
half _Intensity, _Threshold;
half3 _SliceMin, _SliceMax;
float4x4 _AxisRotationMatrix;
float _Angle;
// A ray in the volume's local (object) space.
struct Ray {
float3 origin;
float3 dir;
};
// Axis-aligned bounding box; here the unit cube enclosing the volume.
struct AABB {
float3 min;
float3 max;
};
// http://http.download.nvidia.com/developer/presentations/2005/GDC/Audio_and_Slides/VolumeRendering_files/GDC_2_files/GDC_2005_VolumeRenderingForGames_files/Slide0073.htm
// Ray/AABB intersection via the slab method (see the NVIDIA volume
// rendering slides linked above). Returns true on hit, with t0/t1 the
// entry/exit distances along the ray.
bool intersect(Ray r, AABB aabb, out float t0, out float t1)
{
float3 invR = 1.0 / r.dir;
float3 tbot = invR * (aabb.min - r.origin);
float3 ttop = invR * (aabb.max - r.origin);
// Per-axis near/far slab distances.
float3 tmin = min(ttop, tbot);
float3 tmax = max(ttop, tbot);
// Entry = largest per-axis near distance; exit = smallest far distance.
float2 t = max(tmin.xx, tmin.yz);
t0 = max(t.x, t.y);
t = min(tmax.xx, tmax.yz);
t1 = min(t.x, t.y);
return t0 <= t1;
}
// Maps a local-space position in the unit cube [-0.5, 0.5]^3 to 3D
// texture coordinates in [0, 1]^3.
float3 get_uv(float3 p) {
return (p + 0.5);
}
// Samples the volume texture's red channel at uv, scaled by _Intensity.
// The position parameter p is currently unused.
float sample_volume(float3 uv, float3 p)
{
float v = tex3D(_MainTex, uv).r * _Intensity;
return v;
}
// Vertex input: object-space position and UVs.
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
// Vertex-to-fragment payload; both world- and local-space positions are
// passed so the fragment shader can raymarch in local space.
struct v2f
{
float4 vertex : SV_POSITION;
float2 uv : TEXCOORD0;
float3 world : TEXCOORD1;
float3 local : TEXCOORD2;
};
// Passes clip-space position plus the world-space and local (object)
// space positions needed by the raymarching fragment shader.
v2f vert(appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;
o.world = mul(unity_ObjectToWorld, v.vertex).xyz;
o.local = v.vertex.xyz;
return o;
}
// Front-to-back raymarch through the unit-cube volume.
fixed4 frag(v2f i) : SV_Target
{
Ray ray;
// The ray starts on the cube surface at this fragment's local position.
ray.origin = i.local;
// world space direction to object space
float3 dir = (i.world - _WorldSpaceCameraPos);
ray.dir = normalize(mul(unity_WorldToObject, dir));
AABB aabb;
aabb.min = float3(-0.5, -0.5, -0.5);
aabb.max = float3(0.5, 0.5, 0.5);
float tnear;
float tfar;
intersect(ray, aabb, tnear, tfar);
tnear = max(0.0, tnear);
// float3 start = ray.origin + ray.dir * tnear;
// NOTE(review): marching starts at the surface hit point rather than the
// tnear entry point (the commented line above) — equivalent only while
// the camera is outside the cube; verify for inside-the-volume views.
float3 start = ray.origin;
float3 end = ray.origin + ray.dir * tfar;
float dist = abs(tfar - tnear);
float step_size = dist / float(ITERATIONS);
float3 ds = normalize(end - start) * step_size;
float4 dst = float4(0, 0, 0, 0);
float3 p = start;
[unroll]
for (int iter = 0; iter < ITERATIONS; iter++)
{
float3 uv = get_uv(p);
float v = sample_volume(uv, p);
// Treat the sample as greyscale with half opacity, premultiplied.
float4 src = float4(v, v, v, v);
src.a *= 0.5;
src.rgb *= src.a;
// blend
// Standard front-to-back "under" compositing.
dst = (1.0 - dst.a) * src + dst;
p += ds;
// Early out once accumulated opacity passes the threshold.
if (dst.a > _Threshold) break;
}
return saturate(dst) * _Color;
}
#endif

How to get world-space UV derivative?

I am trying to implement this paper (https://storage.googleapis.com/pub-tools-public-publication-data/pdf/a0ed9e70a833a3e3ef0ad9efc1d979b59eedb8c4.pdf) into Unity, but I have a hard time understanding the different equations. I especially do not understand what are the "world-space UV gradients" described in Section 4.2.
I spent time implementing the equations 1 to 7, here's what I got so far in my fragment shader:
// Screen-space UV gradients (per-pixel derivatives), per eq. 1-7 of the paper.
float w = 64.0;
float2 uGrad = float2(ddx(uv.x), ddy(uv.x));
float2 vGrad = float2(ddx(uv.y), ddy(uv.y));
// E selects a mip-like LOD per axis from the gradient magnitudes.
float2 E = float2(-log2(w * length(uGrad)), -log2(w * length(vGrad)));
// Four candidate UV scalings: base, 2x in u, 2x in v, 2x in both.
float2 uv0 = float2(pow(2.0, floor(E.x)) * uv.x, pow(2.0, floor(E.y)) * uv.y);
float2 uv1 = float2(2.0 * uv0.x, uv0.y);
float2 uv2 = float2(uv0.x, 2.0 * uv0.y);
float2 uv3 = float2(2.0 * uv0.x, 2.0 * uv0.y);
// Fractional part of E drives the blend between adjacent scales.
float2 blendCoef = float2(Blend(E.x - floor(E.x)), Blend(E.y - floor(E.y)));
// NOTE(review): tex2D returns float4; these floats keep only the first
// channel via implicit truncation — confirm that is intended.
float t0 = tex2D(_DisplacementTex, uv0);
float t1 = tex2D(_DisplacementTex, uv1);
float t2 = tex2D(_DisplacementTex, uv2);
float t3 = tex2D(_DisplacementTex, uv3);
// Bilinear blend of the four displacement samples.
return (1.0 - blendCoef.y) * ((1.0 - blendCoef.x) * t0 + blendCoef.x * t2) + blendCoef.y * ((1.0 - blendCoef.x) * t1 + blendCoef.x * t3);
and the Blend function returns this:
// FIX: the original pow(-2.0 * x, 3.0) + pow(3.0 * x, 2.0) expands to
// -8x^3 + 9x^2; the intended Hermite blend weight (smoothstep polynomial)
// is -2x^3 + 3x^2, which is what this now computes.
-2.0 * pow(x, 3.0) + 3.0 * pow(x, 2.0);
Here's the result in Unity.
Now I am stuck at the 8th and 9th equations, that I have tried to implement in my vertex shader as:
// Attempt at eq. 8-9: build the world-space tangent frame, project it to
// view space, and scale by the (not yet computed) world-space UV gradients.
float3 wNormal = UnityObjectToWorldNormal(v.normal);
// NOTE(review): v.tangent is a float4 (w carries handedness); it is
// implicitly truncated to float3 here and the w sign is ignored — confirm
// the binormal orientation is still correct for your meshes.
float3 wTangent = UnityObjectToWorldDir(v.tangent);
float3 wBinormal = cross(wNormal, wTangent);
float3 vTangent = mul((float3x3)UNITY_MATRIX_V, wTangent);
float3 vBinormal = mul((float3x3)UNITY_MATRIX_V, wBinormal);
// Placeholder magnitudes — the actual world-space UV gradients from the
// paper are not computed yet; these just keep the pipeline running.
float2 uGradWS = 0.01;
float2 vGradWS = 0.01;
o.uGrad = length(uGradWS) * vTangent / length(vTangent);
o.vGrad = length(vGradWS) * vBinormal / length(vBinormal);
This is one of the first time I really try to implement something using only equations from the paper (I am usually able to find pre-existing implementation). I'd really like to be able to continue doing this in the future, but it is hard because of my lack of mathematical expertise.
Do you think that I am on the right track with what I already have?
Do you know how I could compute the world-space UV gradient?
My guess is that I would need to compute it on the CPU and pass it as extra vertex data.

How can I convert these fragColor code snippets from ShaderToy so that they will work in Unity?

I'm following this tutorial: https://www.youtube.com/watch?v=CzORVWFvZ28 to convert some code from ShaderToy to Unity. This is the shader that I'm attempting to convert: https://www.shadertoy.com/view/Ws23WD.
I saw that in the tutorial, he was able to take his fragColor statement from ShaderToy and simply return a color in Unity instead. However, when I tried doing that with the code that I have from ShaderToy, an error about not being able to implicitly convert from float3 to float4 popped up. I saw that my color variable is being declared as a float3 which is what must be causing the issue, but I need some help figuring out how to fix this.
I also noticed that I have an 'a' value with the fragColor variable, in addition to the rgb values; would I use a float4 to take in the (r, g, b, a) values?
// Shadertoy port: raymarches scene() and shades hits; the Shadertoy
// fragCoord/iResolution UV setup is replaced by the mesh interpolated UVs.
fixed4 frag (v2f i) : SV_Target
{
    //float2 uv = float2(fragCoord.x / iResolution.x, fragCoord.y / iResolution.y);
    float2 uv = float2(i.uv);
    uv -= 0.5;
    //uv /= float2(iResolution.y / iResolution.x, 1);
    // Camera bobs with time around the scene.
    float3 cam = float3(0, -0.15, -3.5);
    float3 dir = normalize(float3(uv,1));
    float cam_a2 = sin(_Time.y) * pi * 0.1;
    cam.yz = rotate(cam.yz, cam_a2);
    dir.yz = rotate(dir.yz, cam_a2);
    float cam_a = _Time.y * pi * 0.1;
    cam.xz = rotate(cam.xz, cam_a);
    dir.xz = rotate(dir.xz, cam_a);
    // Background color unless the march below hits the surface.
    float3 color = float3(0.16, 0.12, 0.10);
    float t = 0.00001;
    const int maxSteps = 128;
    // FIX: loop counter renamed — "int i" shadowed the v2f input "i".
    for(int stepIdx = 0; stepIdx < maxSteps; ++stepIdx) {
        float3 p = cam + dir * t;
        float d = scene(p);
        // Distance threshold scales with t so far hits stay stable.
        if(d < 0.0001 * t) {
            color = float3(1.0, length(p) * (0.6 + (sin(_Time.y*3.0)+1.0) * 0.5 * 0.4), 0);
            break;
        }
        t += d;
    }
    // FIX: SV_Target needs a float4 — pack RGB with alpha 1. This resolves
    // the "cannot implicitly convert from float3 to float4" error, and is
    // where Shadertoy's fragColor.rgb / fragColor.a assignments go.
    return float4(color, 1.0);
}

Get the tangent from a normal direction cg unity3d

I am writing a shader in cg where I displace the vertexes. Because I displace the vertexes, I recalculate the normals so they point away from the surface and give that information to the fragment function. In the shader I also implemented a normal map, now am I wondering shouldn't I also recalculate the tangents? And is there a formula to calculate the tangent? I read that it is on a 90 degrees angle of the normal, could I use the cross product for that?
I want to pass on the right tangent to VOUT.tangentWorld. This is my vertex function:
// Displaces the vertex vertically with a scrolling displacement map, then
// rebuilds the surface normal from two neighbouring "fake" vertices.
VertexOutput vert (VertexInput i)
{
    VertexOutput VOUT;
    // put the vert in world space
    float4 newVert = mul(_Object2World,i.vertex);
    // create fake vertexes offset along world X and Z
    float4 v1 = newVert + float4(0.05,0.0,0.0,0.0) ; // X
    float4 v2 = newVert + float4(0.0,0.0,0.05,0.0) ; // Z
    // Sample the displacement map at each point (world XZ as UV, scrolled by time).
    float4 disp = tex2Dlod(_Displacement, float4(newVert.x + (_Time.x * _Speed), newVert.z + (_Time.x * _Speed),0.0,0.0));
    float4 disp2 = tex2Dlod(_Displacement, float4(v1.x + (_Time.x * _Speed), newVert.z + (_Time.x * _Speed),0.0,0.0));
    float4 disp3 = tex2Dlod(_Displacement, float4(newVert.x + (_Time.x * _Speed), v2.z + (_Time.x * _Speed),0.0,0.0));
    // offset the main vert
    newVert.y += _Scale * disp.y;
    // FIX: only y should be displaced. The original "v1 += _Scale * disp2.y"
    // added the scalar to x, z and w too, corrupting the edge vectors used
    // to rebuild the normal below.
    v1.y += _Scale * disp2.y;
    v2.y += _Scale * disp3.y;
    // Normal of the displaced surface from the two edge vectors
    // (order chosen so the normal points up/away from the surface).
    float3 newNor = cross(v2 - newVert, v1 - newVert);
    // return world position of the vert for frag calculations
    VOUT.posWorld = newVert;
    // set the vert back in object space; mul(v, M) == mul(transpose(M), v)
    float4 vertObjectSpace = mul(newVert,_World2Object);
    // apply unity mvp matrix to the vert
    VOUT.pos = mul(UNITY_MATRIX_MVP,vertObjectSpace);
    //return the tex coords for frag calculations
    VOUT.tex = i.texcoord;
    // Normals transform with the inverse-transpose: mul(vector, _World2Object)
    // applies transpose(_World2Object), i.e. the inverse-transpose of _Object2World.
    VOUT.normalWorld = normalize( mul(float4(newNor,0.0),_World2Object).xyz);
    VOUT.tangentWorld = normalize( mul(_Object2World,i.tangent).xyz);
    VOUT.binormalWorld = normalize( cross(VOUT.normalWorld, VOUT.tangentWorld) * i.tangent.w);
    return VOUT;
}
Isn't it just the vector v2 - newVert or v1 - newVert because the point along the surface? And how do I know which one of the two it is?
I've used something like the below:
// Pick an arbitrary tangent perpendicular to `normal`: cross with a fixed
// axis, falling back to a second axis when the normal is (anti)parallel
// to the first and the cross product degenerates to zero length.
Vector3 tangent = Vector3.Cross( normal, Vector3.forward );
// FIX: exact float equality on magnitude is fragile and magnitude takes a
// square root; compare squared length against a small epsilon instead.
if( tangent.sqrMagnitude < 1e-10f ) {
    tangent = Vector3.Cross( normal, Vector3.up );
}
Source: http://answers.unity3d.com/questions/133680/how-do-you-find-the-tangent-from-a-given-normal.html