Invalid subscript in unity shader - unity3d

I'm pretty new to shader coding, but I made this one by combining two unlit shaders.
When I try to compile this shader, which is meant to create an outline and let me interpolate between two textures, Unity tells me there is this error: invalid subscript 'vertex' at line 61
I've tried a few things, but I can't get this shader to work properly. If someone knows what I have to do, I'd be very thankful.
Shader "Unlit/Combined"
{
Properties
{
_MainTex("Texture", 2D) = "white" {}
_SecondaryTex("Secondary Texture", 2D) = "white" {}
_LerpValue("Transition float", Range(0,1)) = 0.5
_OutlineColor("Outline color", color) = (0,0,0,1)
_OutlineWidth("Outline width", Range(1.0,5.0)) = 1.01
}
SubShader
{
Tags { "RenderType" = "Opaque" "Queue" = "Transparent"}
LOD 100
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
// make fog work
#pragma multi_compile_fog
#include "UnityCG.cginc"
struct appdata
{
//float4 vertex : POSITION;
float2 uv : TEXCOORD0;
float3 normal : NORMAL;
};
struct v2f
{
float2 uv : TEXCOORD0;
UNITY_FOG_COORDS(1)
float4 outvertex : SV_POSITION;
float4 pos : POSITION;
float3 normal : NORMAL;
};
sampler2D _MainTex;
float4 _MainTex_ST;
sampler2D _SecondaryTex;
float4 _SecondaryTex_ST;
float _LerpValue;
float _OutlineWidth;
float4 _OutlineColor;
v2f vert(appdata v)
{
v2f o;
v.vertex.xyz *= _OutlineWidth;
o.outvertex = UnityObjectToClipPos(v.vertex);
o.pos = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
UNITY_TRANSFER_FOG(o,o.vertex);
return o;
}
fixed4 frag(v2f i) : SV_Target
{
// sample the texture
fixed4 col = lerp(tex2D(_MainTex, i.uv),tex2D(_SecondaryTex, i.uv), _LerpValue);
return col;
return _OutlineColor;
}
ENDCG
}
Pass//Normal render
{
ZWrite On
Material
{
Diffuse[_Color]
Ambient[_Color]
}
Lighting On
SetTexture[_MainTex]
{
ConstantColor[_Color]
}
SetTexture[_MainTex]
{
Combine previous * primary DOUBLE
}
}
}
}

The line that has a problem is line 61:
v.vertex.xyz *= _OutlineWidth;
If you look at the type of v, it's declared as appdata:
v2f vert(appdata v)
If you look at the definition of appdata:
struct appdata
{
//float4 vertex : POSITION;
float2 uv : TEXCOORD0;
float3 normal : NORMAL;
};
You'll see that it does not have a vertex property, only uv and normal.
So the line that's causing the issue should be changed to use one of those fields, probably normal, since you're trying to modify a 3-component value (and uv only has x and y).
v.normal.xyz *= _OutlineWidth;
Alternatively, you can modify appdata to have a vertex property by uncommenting the commented-out vertex:
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
float3 normal : NORMAL;
};
Or by removing line 61, depending on what you're trying to do.
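Putting that together, here is a minimal sketch of the vertex stage with the vertex field restored. This is a reconstruction rather than the poster's final code; it points the fog macro at o.outvertex (the v2f has no member called vertex), initializes the normal output, and assumes the v2f's pos : POSITION field is re-declared as a TEXCOORD, as the 'Duplicated system values' question further down explains:
struct appdata
{
    float4 vertex : POSITION;   // restored so v.vertex exists
    float2 uv : TEXCOORD0;
    float3 normal : NORMAL;
};
v2f vert(appdata v)
{
    v2f o;
    v.vertex.xyz *= _OutlineWidth;               // scale the object-space position to fatten the outline
    o.outvertex = UnityObjectToClipPos(v.vertex);
    o.pos = UnityObjectToClipPos(v.vertex);
    o.uv = TRANSFORM_TEX(v.uv, _MainTex);
    o.normal = v.normal;                         // fill the remaining field so the output is fully initialized
    UNITY_TRANSFER_FOG(o, o.outvertex);          // this v2f names its clip-space position outvertex, not vertex
    return o;
}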

Related

Why is my custom shader code saying: Unexpected token 'uv'?

I watched b3agz's Making Minecraft in Unity tutorial. On episode 15 there was an error when I coded the shader, saying unexpected token 'uv'. Please help.
Here is my code for it:
Shader "Minecraft/Blocks"
{
Properties
{
_MainTex("Block Texture Atlas", 2D) = "white" {}
}
SubShader
{
Tags{"Queue"="AlphaTest" "IgnoreProjector"="True" "RenderType"="TransparentCutout"}
LOD 100
Lighting Off
Pass
{
CGPROGRAM
#pragma vertex vertFunction
#pragma fragment fragFunction
#pragma target 2.0
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION:
float2 uv : TEXCOORD0;
float4 color : COLOR;
};
struct v2f
{
float4 vertex : SV_POSITION:
float2 uv : TEXCOORD0;
float4 color : COLOR;
};
sampler2D _MainTex;
float GlobalLightLevel;
v2f vertFunction(appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;
o.color = v.color;
return o;
}
fixed4 fragFunction(v2f i) : SV_Target
{
fixed4 col = tex2D(_MainTex, i.uv);
float localLightLevel = clamp(GlobalLightLevel + i.color.a,0,1);
clip(col.a - 1);
col = lerp(col, float4(0,0,0,1), localLightLevel);
return col;
}
ENDCG
}
}
}
Both of your struct definitions, appdata and v2f, have a colon in place of a semicolon after the float4 vertex semantic.
The correct syntax would be:
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
float4 color : COLOR;
};
struct v2f
{
float4 vertex : SV_POSITION;
float2 uv : TEXCOORD0;
float4 color : COLOR;
};

What in this custom shader made in Unity could possibly create issues or not translate to openGLCore/GLES?

Attempting to create a custom shader for an application that I'm building for Magic Leap 1.
The shader's purpose is to render only the shadows cast onto the material. The shader works as intended in the Unity editor but throws errors in the mldb logs, and the material turns pink, which in my experience means issues with the render pipeline/shader. I assume the issue is some syntax or Unity semantic that isn't easily translated to OpenGL Core/GLES by Unity's built-in HLSLcc, which compiles shaders into those languages. The following is the shader from top to bottom:
Shader "Custom/SecondaryShadowShader"
{
Properties
{
[NoScaleOffset] _MainTex("Texture", 2D) = "grey" {}
_Color("Main Color", Color) = (1,1,1,1)
_ShadowColor("Shadow Color", Color) = (1,1,1,1)
_Cutoff("Alpha cutoff", Range(0,1)) = 0.5
}
SubShader
{
Pass
{
Tags {"LightMode" = "ForwardBase"}
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
fixed4 _ShadowColor;
#include "UnityCG.cginc"
#include "Lighting.cginc"
// compile shader into multiple variants, with and without shadows
// (we don't care about any lightmaps yet, so skip these variants)
#pragma multi_compile_fwdbase nolightmap nodirlightmap nodynlightmap novertexlight
#pragma multi_compile_instancing
// shadow helper functions and macros
#include "AutoLight.cginc"
struct appdata
{
float2 texcoord : TEXCOORD0;
float3 normal : NORMAL;
float4 vertex : POSITION;
UNITY_VERTEX_INPUT_INSTANCE_ID
};
struct v2f
{
float2 uv : TEXCOORD0;
SHADOW_COORDS(1) // put shadows data into TEXCOORD1
fixed3 diff : COLOR0;
fixed3 ambient : COLOR1;
float4 pos : SV_POSITION;
UNITY_VERTEX_OUTPUT_STEREO
};
v2f vert(appdata v)
{
v2f o;
UNITY_SETUP_INSTANCE_ID(v);
UNITY_INITIALIZE_OUTPUT(v2f, o);
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
o.pos = UnityObjectToClipPos(v.vertex);
o.uv = v.texcoord;
half3 worldNormal = UnityObjectToWorldNormal(v.normal);
half nl = max(0, dot(worldNormal, _WorldSpaceLightPos0.xyz));
o.diff = nl * _LightColor0.rgb;
o.ambient = ShadeSH9(half4(worldNormal,1));
// compute shadows data
TRANSFER_SHADOW(o)
return o;
}
UNITY_DECLARE_SCREENSPACE_TEXTURE(_MainTex);
fixed4 frag(v2f i) : SV_Target
{
UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(i);
fixed4 col = UNITY_SAMPLE_SCREENSPACE_TEXTURE(_MainTex, i.uv); //Insert
fixed shadow = SHADOW_ATTENUATION(i);
clip(-shadow);
fixed3 lighting = i.diff * shadow + i.ambient;
lighting += _ShadowColor * lighting;
col.rgb *= lighting;
return col;
}
ENDCG
}
UsePass "Legacy Shaders/VertexLit/SHADOWCASTER"
}
}

Duplicated system values in unlit shader (Unity)

I'm pretty new to scripting shaders and I was trying to make an unlit shader that generates an outline around the object and lets me interpolate between 2 different textures, but I get 3 errors:
·Shader error in 'Unlit/Both': Duplicate system value semantic definition: output semantic 'POSITION' and output semantic 'SV_POSITION' at line 38
·Shader error in 'Unlit/Both': Duplicate system value semantic definition: input semantic 'SV_POSITION' and input semantic 'POSITION' at line 39
·Shader error in 'Unlit/Both': Duplicate system value semantic definition: output semantic 'SV_POSITION' and output semantic 'POSITION' at line 39
I guess there is something wrong with the position values, but I have no idea what to do, so if anyone knows how to fix it I would be very thankful.
Shader "Unlit/Both"
{
Properties
{
_Color("Main Color", Color) = (0.5,0.5,0.5,1)
_MainTex ("Texture", 2D) = "white" {}
_SecondaryTex ("2nd Texture", 2D) = "white" {}
_LerpValue("Transition float", Range(0,1))= 0.5
_OutlineColor("Outline color", color) = (0,0,0,1)
_OutlineWidth("Outline width", Range(1.0,5.0)) = 1.01
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 100
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
// make fog work
#pragma multi_compile_fog
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
float3 normal : NORMAL;
};
struct v2f
{
float2 uv : TEXCOORD0;
UNITY_FOG_COORDS(1)
float4 vertex : SV_POSITION;
float4 pos : POSITION;
float3 normal : NORMAL;
};
sampler2D _MainTex;
float4 _MainTex_ST;
sampler2D _SecondaryTex;
float4 _SecondaryTex_ST;
float4 _LerpValue;
float _OutlineWidth;
float4 _OutlineColor;
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
UNITY_TRANSFER_FOG(o,o.vertex);
v.vertex.xyz *= _OutlineWidth;
o.pos = UnityObjectToClipPos(v.vertex);
return o;
}
fixed4 frag(v2f i) : SV_Target
{
fixed4 col = lerp(tex2D(_MainTex, i.uv), tex2D(_SecondaryTex, i.uv),_LerpValue);
return col;
}
ENDCG
}
}
SubShader
{
Tags{"Queue" = "Transparent"}
Pass//Render the Outline
{
ZWrite Off
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
half4 frag(v2f i) : COLOR
{
return _OutlineColor;
}
ENDCG
}
Pass//Normal render
{
ZWrite On
Material
{
Diffuse[_Color]
Ambient[_Color]
}
Lighting On
SetTexture[_MainTex]
{
ConstantColor[_Color]
}
SetTexture[_MainTex]
{
Combine previous * primary DOUBLE
}
}
}
}
When you want to pass data from the vertex shader to the pixel shader you need to use a TEXCOORD semantic; in this case, you should replace the 'POSITION' semantic with 'TEXCOORD2' (in the v2f struct).
Semantics specify what data you are passing (actually, how to interpret the data). The first stage in the pipeline is the IA (Input Assembler). When you pass data from the IA to the vertex shader, you use semantics to identify which property you are using. For example, in your appdata struct, vertex is POSITION, so when data (like geometry or 3D objects) is passed to the IA stage, the GPU needs to know which data it is working with; using semantics you can easily find the properties of each object (each object must have a position, and it's better to also have normal, tangent, color, ...).
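For reference, a minimal sketch of the v2f struct with that change applied (the rest of the struct is unchanged from the question):
struct v2f
{
    float2 uv : TEXCOORD0;
    UNITY_FOG_COORDS(1)
    float4 vertex : SV_POSITION; // clip-space position consumed by the rasterizer
    float4 pos : TEXCOORD2;      // extra per-vertex data interpolated to the fragment shader
    float3 normal : NORMAL;
};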

Unity shader pixel manipulation

I'm studying shaders in Unity and I'm trying to achieve the image below, but I can't seem to make my code produce my target output. My end goal is to divide the whole image depending on the size of my array. Any idea how to correct my code?
Target output:
Code:
Shader "Custom/PassArray"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
}
SubShader
{
Tags {"Queue"="Transparent"}
Blend SrcAlpha OneMinusSrcAlpha
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
uniform float4 _Test [3];
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
v2f vert (appdata v)
{
v2f o;
o.vertex = mul(UNITY_MATRIX_MVP, v.vertex);
o.uv = v.uv;
return o;
}
sampler2D _MainTex;
fixed4 frag (v2f i) : SV_Target
{
fixed4 col;
_Test[0] = fixed4(0,0,1,1);
_Test[1] = fixed4(1,0,0,1);
_Test[2] = fixed4(0,1,0,1);
//Test1: This code block produces output1 image.
int index = round(i.uv.x*_Test.Length);
col = _Test[index];
//Test2: This codeblocks outputs pure green.
//if(i.uv.x < 1/3){
// col = _Test[0];
//}
//else if(i.uv.x < 2/3){
// col = _Test[1];
//}
//else if(i.uv.x < 3/3){
// col = _Test[2];
//}
return col;
}
ENDCG
}
}
}
Actual output:
If I understood it correctly, it's giving me black pixels because the maximum of i.uv.x is 1, which makes the index 3 (based on the computation i.uv.x*_Test.Length). But I don't have index 3 in my array.
Change round to floor. Because you're rounding, values greater than 2.5 get rounded to 3. As you probably know, an array of 3 elements has no index 3, hence the black bar.
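As a minimal sketch of that fix (the hard-coded array length of 3 and the clamp for the uv.x == 1 edge case are assumptions added here, not part of the original answer):
int index = floor(i.uv.x * 3);   // yields 0, 1 or 2 across the quad
index = min(index, 2);           // uv.x == 1 would otherwise give index 3
col = _Test[index];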
I've figured it out. Here's the updated code for anyone who may find it useful. I changed the formula from int index = round(i.uv.x*_Test.Length); to int index = ceil(i.uv.x/1*_Test.Length)-1;
Shader "Custom/PassArray"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
}
SubShader
{
Tags {"Queue"="Transparent"}
Blend SrcAlpha OneMinusSrcAlpha
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
uniform float4 _Test [3];
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
float4 color : COLOR;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
float4 color : COLOR;
};
v2f vert (appdata v)
{
v2f o;
o.vertex = mul(UNITY_MATRIX_MVP, v.vertex);
o.uv = v.uv;
return o;
}
sampler2D _MainTex;
fixed4 frag (v2f i) : SV_Target
{
fixed4 col;
_Test[0] = fixed4(0,0,1,1);
_Test[1] = fixed4(1,0,0,1);
_Test[2] = fixed4(0,1,0,1);
//Test1:
int index = ceil(i.uv.x/1*_Test.Length)-1;
col = _Test[index];
return col;
}
ENDCG
}
}
}

CG/Unity: Toy Shader For Duplicating Object

I'm new to writing shaders and I'm working on a practice geometry shader. The goal of the shader is to make the "normal" pass produce transparent pixels so that the object is invisible, while the "geometry" pass takes each triangle and redraws it in the same place as the original, but colored black. Thus, I expect the output to be the original object, but black. However, my geometry pass doesn't seem to produce any output I can see:
Here is the code I currently have for the shader.
Shader "Outlined/Silhouette2" {
Properties
{
_Color("Color", Color) = (0,0,0,1)
_MainColor("Main Color", Color) = (1,1,1,1)
_Thickness("Thickness", float) = 4
_MainTex("Main Texture", 2D) = "white" {}
}
SubShader
{
Tags{ "Queue" = "Geometry" "IgnoreProjector" = "True" "RenderType" = "Transparent" }
Blend SrcAlpha OneMinusSrcAlpha
Cull Back
ZTest always
Pass
{
Stencil{
Ref 1
Comp always
Pass replace
}
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma multi_compile_fog
#include "UnityCG.cginc"
struct v2g
{
float4 pos : SV_POSITION;
float2 uv : TEXCOORD0;
float3 viewT : TANGENT;
float3 normals : NORMAL;
};
struct g2f
{
float4 pos : SV_POSITION;
float2 uv : TEXCOORD0;
float3 viewT : TANGENT;
float3 normals : NORMAL;
};
float4 _LightColor0;
sampler2D _MainTex;
float4 _MainColor;
v2g vert(appdata_base v)
{
v2g OUT;
OUT.pos = mul(UNITY_MATRIX_MVP, v.vertex);
OUT.uv = v.texcoord;
OUT.normals = v.normal;
OUT.viewT = ObjSpaceViewDir(v.vertex);
return OUT;
}
half4 frag(g2f IN) : COLOR
{
//this renders nothing, if you want the base mesh and color
//fill this in with a standard fragment shader calculation
float4 texColor = tex2D(_MainTex, IN.uv);
float3 normal = mul(float4(IN.normals, 0.0), _Object2World).xyz;
float3 normalDirection = normalize(normal);
float3 lightDirection = normalize(_WorldSpaceLightPos0.xyz * -1);
float3 diffuse = _LightColor0.rgb * _MainColor.rgb * max(0.0, dot(normalDirection, lightDirection));
texColor = float4(diffuse,1) * texColor;
//
//return texColor;
return float4(0, 0, 0, 0);
}
ENDCG
}
Pass
{
Stencil{
Ref 0
Comp equal
}
CGPROGRAM
#include "UnityCG.cginc"
#pragma target 4.0
#pragma vertex vert
#pragma geometry geom
#pragma fragment frag
half4 _Color;
float _Thickness;
struct v2g
{
float4 pos : SV_POSITION;
float2 uv : TEXCOORD0;
float4 local_pos: TEXCOORD1;
float3 viewT : TANGENT;
float3 normals : NORMAL;
};
struct g2f
{
float4 pos : SV_POSITION;
float2 uv : TEXCOORD0;
float3 viewT : TANGENT;
float3 normals : NORMAL;
};
v2g vert(appdata_base v)
{
v2g OUT;
OUT.pos = mul(UNITY_MATRIX_MVP, v.vertex);
OUT.local_pos = v.vertex;
OUT.uv = v.texcoord;
OUT.normals = v.normal;
OUT.viewT = ObjSpaceViewDir(v.vertex);
return OUT;
}
[maxvertexcount(12)]
void geom(triangle v2g IN[3], inout TriangleStream<g2f> triStream)
{
g2f OUT;
OUT.pos = IN[0].pos;
OUT.uv = IN[0].uv;
OUT.viewT = IN[0].viewT;
OUT.normals = IN[0].normals;
triStream.Append(OUT);
OUT.pos = IN[1].pos;
OUT.uv = IN[1].uv;
OUT.viewT = IN[1].viewT;
OUT.normals = IN[1].normals;
triStream.Append(OUT);
OUT.pos = IN[2].pos;
OUT.uv = IN[2].uv;
OUT.viewT = IN[2].viewT;
OUT.normals = IN[2].normals;
triStream.Append(OUT);
}
half4 frag(g2f IN) : COLOR
{
_Color.a = 1;
return _Color;
}
ENDCG
}
}
FallBack "Diffuse"
}
Since all I'm doing is taking the same triangles I've been given and appending them to the triangle stream, I'm not sure what I could be doing wrong that causes nothing to appear. Does anyone know why this is happening?
I notice that you don't call triStream.RestartStrip(); after feeding in the 3 vertices of a triangle in your geometry shader.
This informs the stream that a particular triangle strip has ended, and a new triangle strip will begin. If you don't do this, each (single) vertex passed to the stream will add on to the existing triangle strip, using the triangle strip pattern: https://en.wikipedia.org/wiki/Triangle_strip
I'm fairly new to geometry shaders myself, so I'm not sure if this is your issue or not. I don't THINK the RestartStrip function is called automatically at the end of each geometry shader, but I have not tested this; rather, I think it only gets called automatically when you reach maxvertexcount. For a single triangle, I would set maxvertexcount to 3, not the 12 you have now.
(I also know it can be tough to get ANY shader answers, so I figured I'd try to help.)
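For illustration, a minimal sketch of the geometry function with those suggestions applied (maxvertexcount lowered to 3, and RestartStrip called once the triangle's three vertices have been appended); this is a sketch of the suggestion, not tested code:
[maxvertexcount(3)]
void geom(triangle v2g IN[3], inout TriangleStream<g2f> triStream)
{
    g2f OUT;
    for (int i = 0; i < 3; i++)
    {
        // copy each input vertex through unchanged
        OUT.pos = IN[i].pos;
        OUT.uv = IN[i].uv;
        OUT.viewT = IN[i].viewT;
        OUT.normals = IN[i].normals;
        triStream.Append(OUT);
    }
    triStream.RestartStrip(); // end this strip so the next triangle starts fresh
}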