Alpha channel not working correctly on vertex shader - unity3d

I seem to be having some issues with the order it's drawing in, or something like that.
I'm using vertex colors.
If I set the alpha of the bottom vertices back to 1, I still get that draw issue on top of the pillars.
If I set the queue to opaque it renders correctly, but then the alpha renders as white (which would be expected, since there are no transparencies in the opaque queue).
Shader "Custom/VertexColors2" {
Properties {
_Color ("Color", Color) = (0,0,0,0)
_MainTex ("Albedo (RGB)", 2D) = "white" {}
_GridTex ("Grid Texture", 2D) = "white" {}
_Glossiness ("Smoothness", Range(0,1)) = 0.5
_Metallic ("Metallic", Range(0,1)) = 0.0
}
SubShader {
Tags {"Queue"="Transparent" "IgnoreProjector"="True" "RenderType"="Transparent"}
CGPROGRAM
#pragma surface surf Standard alpha
#pragma target 3.5
sampler2D _MainTex;
sampler2D _GridTex;
struct Input {
float2 uv_MainTex;
float4 color : COLOR;
float3 worldPos;
};
half _Glossiness;
half _Metallic;
fixed4 _Color;
void surf (Input IN, inout SurfaceOutputStandard o) {
fixed4 c = tex2D(_MainTex, IN.uv_MainTex) * _Color;
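// Scale world XZ into grid-texture UVs: the texture repeats every 4 * 8.66025404 units in X
// and every 2 * 15 units in Z (8.66025404 is presumably 10 * sqrt(3)/2, i.e. a hex inner radius).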
float2 gridUV = IN.worldPos.xz;
gridUV.x *= 1 / (4 * 8.66025404);
gridUV.y *= 1 / (2 * 15.0);
fixed4 grid = tex2D(_GridTex, gridUV);
o.Albedo = c.rgb * IN.color * grid;
o.Metallic = _Metallic;
o.Smoothness = _Glossiness;
o.Alpha = IN.color.a;
}
ENDCG
}
FallBack "Standard"
}

I ended up adding a pass before the CGPROGRAM block starts, and it seems to be doing what I want it to do, though it still needs a little more tweaking.
Shader "Custom/VertexColors2" {
Properties {
_Color ("Color", Color) = (0,0,0,0)
_MainTex ("Albedo (RGB)", 2D) = "white" {}
_GridTex ("Grid Texture", 2D) = "white" {}
_Glossiness ("Smoothness", Range(0,1)) = 0.5
_Metallic ("Metallic", Range(0,1)) = 0.0
}
SubShader {
Tags { "Queue"="Transparent" "IgnoreProjector"="True" "RenderType"="Opaque"}
LOD 200
Pass {
Cull Off
Blend One OneMinusSrcAlpha
}
ZWrite Off
CGPROGRAM
#pragma surface surf Standard fullforwardshadows alpha:blend
#pragma target 3.5
sampler2D _MainTex;
sampler2D _GridTex;
struct Input {
float2 uv_MainTex;
float4 color : COLOR;
float3 worldPos;
};
half _Glossiness;
half _Metallic;
fixed4 _Color;
void surf (Input IN, inout SurfaceOutputStandard o) {
fixed4 c = tex2D(_MainTex, IN.uv_MainTex) * _Color;
float2 gridUV = IN.worldPos.xz;
gridUV.x *= 1 / (4 * 8.66025404);
gridUV.y *= 1 / (2 * 15.0);
fixed4 grid = tex2D(_GridTex, gridUV);
o.Albedo = c.rgb * IN.color * grid;
o.Metallic = _Metallic;
o.Smoothness = _Glossiness;
o.Alpha = IN.color.a;
}
ENDCG
}
FallBack "Diffuse"
}

Related

Custom shader casts shadows but cannot receive them

I have two shaders for my 2D sprites to cast shadows when they are "stood up" 10 degrees off a flat plane.
This one casts and receives shadows, but I cannot use the alpha channel; transparency is entirely ignored:
Shader "Custom/SpriteShadow"
{
Properties {
_Color ("Color", Color) = (1,1,1,1)
[PerRendererData]_MainTex ("Sprite Texture", 2D) = "white" {}
_Cutoff("Shadow alpha cutoff", Range(0,1)) = 0.5
}
SubShader {
Tags
{
"Queue"="Geometry"
"RenderType"="TransparentCutout"
}
LOD 200
Cull Off
CGPROGRAM
// Lambert lighting model, and enable shadows on all light types
#pragma surface surf Lambert addshadow fullforwardshadows
// Use shader model 3.0 target, to get nicer looking lighting
#pragma target 3.0
sampler2D _MainTex;
fixed4 _Color;
fixed _Cutoff;
struct Input
{
float2 uv_MainTex;
};
void surf (Input IN, inout SurfaceOutput o) {
fixed4 c = tex2D (_MainTex, IN.uv_MainTex) * _Color;
o.Albedo = c.rgb;
o.Alpha = c.a;
clip(o.Alpha - _Cutoff);
}
ENDCG
}
FallBack "Diffuse"
}
The second shader can cast shadows and works with alpha/transparency, but cannot receive shadows.
Shader "Custom/SpriteShadowWithAlpha"{
Properties
{
[PerRendererData] _MainTex("Texture", 2D) = "white" {}
_EffectColor1("Effect Color", Color) = (1,1,1,1)
_Crossfade("Fade", float) = 0
_FlashColor("Flash Color", Color) = (1,1,1,1)
_FlashAmount("Flash Amount",Range(0.0,1.0)) = 0
_Cutoff("Alpha Cutoff", Range(0,1)) = 0.9
_Color ("Color", Color) = (1,1,1,1)
[Toggle(_ALPHABLEND_ON)] ALPHABLEND_ON("Enable Dithered Shadows", Float) = 0.0
}
SubShader
{
Tags
{
"Queue" = "Transparent"
"IgnoreProjector" = "True"
"RenderType" = "TransparentCutOut"
"PreviewType" = "Plane"
"CanUseSpriteAtlas" = "True"
}
Cull Off
Lighting Off
ZWrite Off
Blend SrcAlpha OneMinusSrcAlpha
CGPROGRAM
#pragma surface surf Lambert alpha:blend fullforwardshadows alphatest:_Cutoff
#pragma target 3.0
struct Input {
fixed2 uv_MainTex;
fixed4 color : COLOR;
};
sampler2D _MainTex;
fixed4 _EffectColor1;
fixed _Crossfade;
fixed4 _FlashColor;
float _FlashAmount;
void surf(Input IN, inout SurfaceOutput o)
{
fixed4 col = tex2D(_MainTex, IN.uv_MainTex);
fixed4 returnColor = lerp(col, col * _EffectColor1, _Crossfade) * _EffectColor1.a + col * (1.0 - _EffectColor1.a);
o.Albedo = returnColor.rgb * IN.color.rgb;
o.Alpha = col.a * IN.color.a;
o.Albedo = lerp(o.Albedo,_FlashColor.rgb,_FlashAmount);
}
ENDCG
}
Fallback "Standard"
}
I have tried mixing and matching different parts of the two, but I can never get a shader that both casts and receives shadows while still working with the alpha channel.
This may not be the answer anyone in the future wants to see, but it is the one I went with. Originally I was unable to figure out a URP shader to do what I wanted; I was, however, able to find one that suited my needs later on.
So my solution was to upgrade to URP and ditch the standard shader, using the following video as a guide and shader source:
https://www.youtube.com/watch?v=d_OBjV7c1CY

Unity3D standard surface particle stream shader error in Unity 2019

I've been trying to get the shader example from Unity's website to compile in Unity 2019.
Shader "Custom/test" {
Properties {
_Color ("Color", Color) = (1,1,1,1)
_MainTex ("Albedo (RGB)", 2D) = "white" {}
_Glossiness ("Smoothness", Range(0,1)) = 0.5
_Metallic ("Metallic", Range(0,1)) = 0.0
}
SubShader {
Tags {"Queue"="Transparent" "RenderType"="Transparent"}
Blend SrcAlpha OneMinusSrcAlpha
ZWrite off
LOD 200
CGPROGRAM
// Physically based Standard lighting model, and enable shadows on all light types
#pragma surface surf Standard alpha vertex:vert
// Use shader model 3.0 target, to get nicer looking lighting
#pragma target 3.0
sampler2D _MainTex;
struct appdata_particles {
float4 vertex : POSITION;
float3 normal : NORMAL;
float4 color : COLOR;
float4 texcoords : TEXCOORD0;
float texcoordBlend : TEXCOORD1;
};
struct Input {
float2 uv_MainTex;
float2 texcoord1;
float blend;
float4 color;
};
void vert(inout appdata_particles v, out Input o) {
UNITY_INITIALIZE_OUTPUT(Input,o);
o.uv_MainTex = v.texcoords.xy;
o.texcoord1 = v.texcoords.zw;
o.blend = v.texcoordBlend;
o.color = v.color;
}
half _Glossiness;
half _Metallic;
fixed4 _Color;
void surf (Input IN, inout SurfaceOutputStandard o) {
fixed4 colA = tex2D(_MainTex, IN.uv_MainTex);
fixed4 colB = tex2D(_MainTex, IN.texcoord1);
fixed4 c = 2.0f * IN.color * lerp(colA, colB, IN.blend) * _Color;
o.Albedo = c.rgb;
// Metallic and smoothness come from slider variables
o.Metallic = _Metallic;
o.Smoothness = _Glossiness;
o.Alpha = c.a;
}
ENDCG
}
FallBack "Diffuse"
}
But the compilation gives an error:
Shader error in 'Custom/test': invalid subscript 'texcoord' at line 157 (on d3d11)
I suspect there is a difference between Unity 2017, for which the example was made, and Unity 2019, but I can't figure out what the problem is.
Managed to figure it out:
Shader "Custom/StandardTransparentQueue" {
Properties {
_Color ("Color", Color) = (1,1,1,1)
_MainTex ("Albedo (RGB)", 2D) = "white" {}
_Glossiness ("Smoothness", Range(0,1)) = 0.5
_Metallic ("Metallic", Range(0,1)) = 0.0
}
SubShader {
Tags {"Queue"="Transparent" "RenderType"="Transparent"}
Blend SrcAlpha OneMinusSrcAlpha
ZWrite off
LOD 200
CGPROGRAM
// Physically based Standard lighting model, and enable shadows on all light types
#pragma surface surf Standard alpha vertex:vert
// Use shader model 3.0 target, to get nicer looking lighting
#pragma target 3.0
sampler2D _MainTex;
struct appdata_particles {
float4 vertex : POSITION;
float3 normal : NORMAL;
float4 color : COLOR;
float4 texcoords : TEXCOORD0;
float texcoordBlend : TEXCOORD1;
};
struct Input {
float2 texcoord;
float2 texcoord1;
float blend;
float4 color;
};
void vert(inout appdata_particles v, out Input o) {
UNITY_INITIALIZE_OUTPUT(Input,o);
o.texcoord = v.texcoords.xy;
o.texcoord1 = v.texcoords.zw;
o.blend = v.texcoordBlend;
o.color = v.color;
}
half _Glossiness;
half _Metallic;
fixed4 _Color;
void surf (Input IN, inout SurfaceOutputStandard o) {
fixed4 colA = tex2D(_MainTex, IN.texcoord);
fixed4 colB = tex2D(_MainTex, IN.texcoord1);
fixed4 c = 2.0f * IN.color * lerp(colA, colB, IN.blend) * _Color;
o.Albedo = c.rgb;
// Metallic and smoothness come from slider variables
o.Metallic = _Metallic;
o.Smoothness = _Glossiness;
o.Alpha = c.a;
}
ENDCG
}
FallBack "Diffuse"
}
In Unity 2019 it expects texcoord to be a member of the Input structure instead of uv_MainTex.
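For reference, the only functional change between the broken shader and the working one is that Input member and the lookups that use it:
// Unity 2017 example (fails in Unity 2019 with "invalid subscript 'texcoord'"):
struct Input {
    float2 uv_MainTex;
    float2 texcoord1;
    float blend;
    float4 color;
};
// in vert: o.uv_MainTex = v.texcoords.xy;
// in surf: fixed4 colA = tex2D(_MainTex, IN.uv_MainTex);

// Unity 2019: name the member texcoord and sample with it instead:
struct Input {
    float2 texcoord;
    float2 texcoord1;
    float blend;
    float4 color;
};
// in vert: o.texcoord = v.texcoords.xy;
// in surf: fixed4 colA = tex2D(_MainTex, IN.texcoord);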
Hope it helps others :)

Making a mesh partly semi-transparent and still receive correct shadows

I am creating a game like Minecraft, but with a top-down view rather than a first-person view.
The problem is that, as soon as you dig inside something, the player (obviously) can't be seen anymore. Because of that, I tried to make a shader that cuts a hole in the mesh so that you can see where you are underground.
I'm really bad with shaders, but somehow I managed to code something like that.
As you can see, the hole also cuts a hole in the shadow casting, although the actual ceiling should still be there.
This is the code:
Shader "Custom/NewSurfaceShader"
{
Properties
{
_Color ("Color", Color) = (1,1,1,1)
_MainTex ("Albedo (RGB)", 2D) = "white" {}
_Glossiness ("Smoothness", Range(0,1)) = 0.5
_Metallic ("Metallic", Range(0,1)) = 0.0
_Cutoff("Cutoff", Range(0, 1)) = 0.5
_PlayerPosition ("Player Position", Vector) = (0, 0, 0)
}
SubShader
{
Tags { "Queue"="Geometry" }
CGPROGRAM
#pragma surface surf Standard fullforwardshadows addshadow
#pragma target 3.0
sampler2D _MainTex;
struct Input
{
float2 uv_MainTex;
float3 worldPos;
};
half _Glossiness;
half _Metallic;
fixed4 _Color;
fixed3 _PlayerPosition;
void surf (Input IN, inout SurfaceOutputStandard o)
{
fixed4 c = tex2D (_MainTex, IN.uv_MainTex) * _Color;
o.Albedo = c.rgb;
o.Metallic = _Metallic;
o.Smoothness = _Glossiness;
if (distance(IN.worldPos.xz, _PlayerPosition.xz) < 7 && IN.worldPos.y > _PlayerPosition.y)
{
clip(-1);
}
}
ENDCG
Tags {"Queue"="Transparent" "IgnoreProjector"="True" "RenderType"="Transparent"}
CGPROGRAM
#pragma surface surf Standard addshadow alpha:fade
#pragma target 3.0
sampler2D _MainTex;
struct Input
{
float2 uv_MainTex;
float3 worldPos;
};
half _Glossiness;
half _Metallic;
fixed4 _Color;
fixed3 _PlayerPosition;
void surf (Input IN, inout SurfaceOutputStandard o)
{
fixed4 c = tex2D (_MainTex, IN.uv_MainTex) * _Color;
o.Albedo = c.rgb;
o.Metallic = 0;
o.Smoothness = _Glossiness;
if (distance(IN.worldPos.xz, _PlayerPosition.xz) < 7 && IN.worldPos.y > _PlayerPosition.y)
{
o.Albedo *= 0;
o.Alpha = 0.2;
}
else
{
discard;
}
}
ENDCG
}
FallBack Off
}
Is there a way to cast a shadow without that see-through hole? And is there an overall better way to write such a shader?
Would be grateful for any answers, cheers!

Does a Unity3D custom shader work with its GPU skinning automatically?

Hi, I'm writing my own shader for mobile, but I was wondering if I will break the GPU skinning feature, since that also uses the vertex shader. My shader is below:
Shader "Mobile Custom/Specular Map" {
Properties {
_ShininessColor("Shininess Color", Color) = (1,1,1,1)
_Shininess ("Shininess", Range (0.03, 1)) = 0.078125
_MainTex ("Base (RGB)", 2D) = "white" {}
_SpecMap ("Specular Map", 2D) = "white" {}
}
SubShader {
Tags { "RenderType"="Opaque" }
LOD 250
Cull Back
CGPROGRAM
#pragma surface surf MobileBlinnPhong exclude_path:prepass nolightmap halfasview interpolateview noshadow nofog nometa nolppv noshadowmask
inline fixed4 LightingMobileBlinnPhong (SurfaceOutput s, fixed3 lightDir, fixed3 halfDir, fixed atten) {
fixed diff = max (0, dot (s.Normal, lightDir));
fixed nh = max (0, dot (s.Normal, halfDir));
fixed spec = pow (nh, s.Specular * 128) * s.Gloss;
fixed4 c;
c.rgb = (s.Albedo * _LightColor0.rgb * diff + _LightColor0.rgb * spec) * atten;
UNITY_OPAQUE_ALPHA(c.a);
return c;
}
sampler2D _MainTex;
sampler2D _SpecMap;
uniform float4 _ShininessColor;
half _Shininess;
struct Input {
float2 uv_MainTex;
};
void surf (Input IN, inout SurfaceOutput o) {
fixed4 tex = tex2D(_MainTex, IN.uv_MainTex);
o.Albedo = tex.rgb;
o.Gloss = tex2D(_SpecMap, IN.uv_MainTex) * _ShininessColor;
o.Alpha = tex.a;
o.Specular = _Shininess;
}
ENDCG
}
FallBack "Mobile/VertexLit"
}
According to here, if using a custom shader with instancing, we need to add the instancing code to our custom shader. Does the same apply to GPU skinning?
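For context, the per-shader instancing support those docs ask you to add to a hand-written vertex/fragment shader amounts to a few macros. Here is a minimal sketch of that boilerplate (the shader name and pass are hypothetical, for illustration only; a surface shader like the one above gets most of this generated for it once instancing is enabled):
Shader "Custom/InstancingBoilerplateSketch" {
    SubShader {
        Pass {
            CGPROGRAM
            #pragma vertex vert
            #pragma fragment frag
            // compile an instanced variant of this pass
            #pragma multi_compile_instancing
            #include "UnityCG.cginc"
            struct appdata {
                float4 vertex : POSITION;
                UNITY_VERTEX_INPUT_INSTANCE_ID // per-instance ID in the vertex input
            };
            struct v2f {
                float4 pos : SV_POSITION;
            };
            v2f vert (appdata v) {
                v2f o;
                UNITY_SETUP_INSTANCE_ID(v); // makes per-instance data (e.g. unity_ObjectToWorld) available
                o.pos = UnityObjectToClipPos(v.vertex);
                return o;
            }
            fixed4 frag (v2f i) : SV_Target {
                return fixed4(1, 1, 1, 1);
            }
            ENDCG
        }
    }
}
Per-instance material properties would additionally go in a UNITY_INSTANCING_BUFFER_START/END block and be read with UNITY_ACCESS_INSTANCED_PROP.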

Combining Two Shaders into One Shader

Unity project: I want to combine these two shaders into one shader to get both of their functionality. One shader is for lighting; the other shader is for better rendering. How do I combine them?
Shader "Transparent/Cutout/Lit3dSprite" {
Properties{
_MainCol("Main Tint", Color) = (1,1,1,1)
_MainTex("Main Texture", 2D) = "white" {}
_Cutoff("Alpha cutoff", Range(0,1)) = 0.5
}
SubShader{
Tags {"Queue" = "AlphaTest" "IgnoreProjector" = "True" "RenderType" = "TransparentCutout" "PreviewType" = "Plane"}
Cull Off
LOD 200
CGPROGRAM
#pragma surface surf SimpleLambert alphatest:_Cutoff addshadow fullforwardshadows
#pragma target 3.0
sampler2D _MainTex;
fixed4 _MainCol;
half4 LightingSimpleLambert(SurfaceOutput s, half3 lightDir, half atten)
{
half4 c;
c.rgb = s.Albedo * _MainCol.rgb * (atten)* _LightColor0.rgb;
c.a = s.Alpha;
return c;
}
struct Input {
float2 uv_MainTex;
};
void surf(Input IN, inout SurfaceOutput o) {
fixed4 c = tex2D(_MainTex, IN.uv_MainTex) * _MainCol;
o.Albedo = lerp(c.rgb, c.rgb, c.a);
o.Alpha = c.a;
}
ENDCG
}
Fallback "Transparent/Cutout/VertexLit"
}
Shader 2:
// Upgrade NOTE: replaced 'mul(UNITY_MATRIX_MVP,*)' with 'UnityObjectToClipPos(*)'
Shader "RetroAA/Sprite"
{
Properties
{
[PerRendererData] _MainTex ("Sprite Texture", 2D) = "white" {}
_Color ("Tint", Color) = (1,0,0,0)
}
SubShader
{
Tags {
"Queue"="Transparent"
"IgnoreProjector"="True"
"RenderType"="Transparent"
"PreviewType"="Plane"
"CanUseSpriteAtlas"="True"
}
LOD 100
Cull Off
Lighting Off
ZWrite Off
Blend SrcAlpha OneMinusSrcAlpha
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma multi_compile_fog
#include "RetroAA.cginc"
struct appdata {
float4 vertex : POSITION;
float4 color : COLOR;
float2 uv : TEXCOORD0;
};
struct v2f {
float4 vertex : SV_POSITION;
fixed4 color : COLOR;
float2 uv : TEXCOORD0;
};
sampler2D _MainTex;
float4 _MainTex_ST;
float4 _MainTex_TexelSize;
float4 _Color;
v2f vert(appdata v){
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
o.color = v.color * _Color;
return o;
}
fixed4 frag(v2f i) : SV_Target {
fixed4 color = RetroAA(_MainTex, i.uv, _MainTex_TexelSize);
return i.color*color*color.a;
}
ENDCG
}
}
}
The second shader isn't too complicated, and can be merged into the first one by using the second shader's code to change how the first shader's surf calculates fixed4 c.
You'll also need to include the definition of RetroAA and declare the texel size of the main texture among the shader variables.
However, the first shader assumes that there is no partial transparency, and the second shader requires it, so you have to accommodate that. You'll need to use alpha blending, change the RenderType and Queue to Transparent, and indicate ZWrite Off.
Here is what that might all look like together:
Shader "Transparent/Cutout/Lit3dSprite" {
Properties{
_MainCol("Main Tint", Color) = (1,1,1,1)
_MainTex("Main Texture", 2D) = "white" {}
_Cutoff("Alpha cutoff", Range(0,1)) = 0.5
}
SubShader{
// change RenderType and Queue to Transparent
Tags {"Queue" = "Transparent" "IgnoreProjector" = "True" "RenderType" = "Transparent" "PreviewType" = "Plane"}
Cull Off
ZWrite Off // Add this
LOD 200
// Enable Alpha blending here
Blend SrcAlpha OneMinusSrcAlpha
CGPROGRAM
// Enable Alpha blending here also
#pragma surface surf SimpleLambert alphatest:_Cutoff addshadow fullforwardshadows alpha:blend
#pragma target 3.0
sampler2D _MainTex;
float4 _MainTex_TexelSize; // Add this
fixed4 _MainCol;
// include this
fixed4 RetroAA(sampler2D tex, float2 uv, float4 texelSize)
{
float2 texelCoord = uv * texelSize.zw;
float2 hfw = 0.5*fwidth(texelCoord);
float2 fl = floor(texelCoord - 0.5) + 0.5;
float2 uvaa = (fl + smoothstep(0.5 - hfw, 0.5 + hfw, texelCoord - fl))*texelSize.xy;
return tex2D(tex, uvaa);
}
half4 LightingSimpleLambert(SurfaceOutput s, half3 lightDir, half atten)
{
half4 c;
// Fix the lambert lighting implementation here
half NdotL = dot(s.Normal, lightDir);
// We set the surface rgba in surf, so don't need to do it again here.
c.rgb = s.Albedo * (NdotL * atten) * _LightColor0.rgb;
c.a = s.Alpha;
return c;
}
struct Input {
float2 uv_MainTex;
float4 color: Color; // Add this to use SpriteRenderer color
};
void surf(Input IN, inout SurfaceOutput o) {
// replace this line:
// fixed4 c = tex2D(_MainTex, IN.uv_MainTex) * _MainCol;
// with this
fixed4 c = RetroAA(_MainTex, IN.uv_MainTex, _MainTex_TexelSize);
// factor in MainCol and SpriteRenderer color/tints
o.Albedo = c.rgb * _MainCol.rgb * IN.color.rgb;
o.Alpha = c.a * _MainCol.a * IN.color.a;
}
ENDCG
}
Fallback "Transparent/Cutout/VertexLit"
}
You might need to turn down the Alpha cutoff to zero or some other low number in order to make the AA work nicely.