After failing to implement some vector and matrix calculations in a fragment shader, I tried to test basic matrix calculations in the vertex shader, just to be sure.
This works as expected:
uniform float4x4 _Identity4x4 = { 1.0, 0.0, 0.0, 0.0,
                                  0.0, 1.0, 0.0, 0.0,
                                  0.0, 0.0, 1.0, 0.0,
                                  0.0, 0.0, 0.0, 1.0 };
v2f vert (appdata v)
{
    v2f o;
    o.vertex = UnityObjectToClipPos( mul( v.vertex, _Identity4x4 ));
    ...
But this creates havoc:
uniform float3x3 _Identity3x3 = { 1.0, 0.0, 0.0,
                                  0.0, 1.0, 0.0,
                                  0.0, 0.0, 1.0 };
v2f vert (appdata v)
{
    v2f o;
    o.vertex = UnityObjectToClipPos( float4( mul( v.vertex.xyz, _Identity3x3 ), v.vertex.w));
    ...
I'm sure I'm missing something important, but I can't figure it out or find anything similar online.
It turns out the matrices need to be declared static. I'm still not sure why the 4x4 works and the 3x3 doesn't...
A basic explanation can be found here.
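For reference, a minimal sketch of the fix described above (moving the matrix from a uniform to a compile-time constant; this matches the answer, but it isn't verified against every Unity version):
static const float3x3 _Identity3x3 = { 1.0, 0.0, 0.0,
                                       0.0, 1.0, 0.0,
                                       0.0, 0.0, 1.0 };
// The vertex shader body can stay exactly as in the broken example:
// o.vertex = UnityObjectToClipPos( float4( mul( v.vertex.xyz, _Identity3x3 ), v.vertex.w));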
In Unity I'm using a geometry shader to process points into quads, and I can't figure out why I'm not getting any output from it. I've cut it down to a minimal example, as seen here:
Shader "foo/bar"
{
SubShader{
Cull Off
Lighting Off
ZWrite Off
Pass{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma geometry geom
struct appdata {
float4 vertex : POSITION;
};
struct v2g {
float4 vertex : POSITION;
};
struct g2f {
float4 vertex : POSITION;
};
v2g vert(appdata v) {
v2g o = (v2g)0;
o.vertex = v.vertex;
return o;
}
[maxvertexcount(4)]
void geom(point v2g p[1], inout TriangleStream<g2f> tristream)
{
g2f o = (g2f)0;
o.vertex = float4(0.1, 0.1, 0, 0);
tristream.Append(o);
o.vertex = float4(0.1, 0.9, 0, 0);
tristream.Append(o);
o.vertex = float4(0.9, 0.9, 0, 0);
tristream.Append(o);
}
fixed4 frag(g2f i) : COLOR
{
return fixed4(1,1,1,1);
}
ENDCG
}
}
}
I'd expect that to draw a white triangle covering just under half the texture I'm rendering to. Instead it displays all black, just as it did before adding the shader.
So far I've:
Removed every possible source of culling I can think of
Made absolutely sure my setup works when rendering a mesh in a similar fashion
Checked that this is getting input and running
and scoured the very limited amount of documentation available
I'm all outta ideas. If anyone has even a minor clue as to what I'm doing wrong please let me know. Thanks.
-Fraser
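One hedged observation on the example above (a guess at a likely culprit, not a verified diagnosis): the positions emitted from geom are clip-space values with w = 0, and a clip-space position with w = 0 fails the clip test, so nothing reaches the rasterizer. A minimal sketch of a geom that should emit a visible strip, keeping the rest of the pass unchanged:
[maxvertexcount(4)]
void geom(point v2g p[1], inout TriangleStream<g2f> tristream)
{
    g2f o = (g2f)0;
    // w = 1 makes these valid clip-space positions (they survive the perspective divide).
    o.vertex = float4(-0.5, -0.5, 0, 1); tristream.Append(o);
    o.vertex = float4(-0.5,  0.5, 0, 1); tristream.Append(o);
    o.vertex = float4( 0.5, -0.5, 0, 1); tristream.Append(o);
    o.vertex = float4( 0.5,  0.5, 0, 1); tristream.Append(o);
}
Using SV_POSITION instead of POSITION for the g2f member may also matter on some compile targets.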
If I have a Unity shader which is acting as a basic screen-wide image filter, would there be any way (outside of making a material glowing/unshaded) to "recognize" specific pixels of a material? Let's say I want a heat vision filter and want to allow specific objects to be considered "hot". How could the pixel shader check their color, or anything else, and understand "this is hot"? (If I made it glowing/unshaded, I could encode specific properties into subtle RGB changes, e.g. maybe if all of RGB end in *.***5 it would mean hot, but that wouldn't work with shading applied.) Thanks!
First you should convert your image to grayscale, then apply a color spectrum to it. If you look at the image below,
the closer to white, the warmer, and the closer to black, the colder.
Then you can add this effect by using a replacement shader.
Shader"Hidden/HeatMap"{
Properties{
_MainTex("_MainTex", 2D) = "white"{}
_Amount("Amount",Float) = 1
[Toggle]_Enable("Enable",Float) = 1
}
SubShader{
Pass{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma fragmentoption ARB_precision_hint_fastest
#include "UnityCG.cginc"
struct appdata{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
sampler2D _MainTex;
float _Amount,_Enable;
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
v2f vert(appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;
return o;
}
fixed greyScale(fixed3 rgb) {
return dot(rgb, fixed3(0.29, 0.60, 0.11));
}
fixed3 heatMap(fixed greyValue) {
fixed3 heat = fixed3(0,0,0);
heat.r = smoothstep(0.4, 0.8, greyValue);
half OutColorGreen = smoothstep(0.0, 0.7, greyValue);
half InColorGreen = smoothstep(1.0, 0.7, greyValue);
heat.g = min(InColorGreen,OutColorGreen);
float OutColorBlue = smoothstep(1.0, 0.0, greyValue);
float InColorBlue = smoothstep(0.0, 0.25, greyValue);
heat.b = min(OutColorBlue,InColorBlue);
return heat;
}
fixed4 frag(v2f i) : COLOR{
fixed2 uv = i.uv;
fixed3 mainTex = tex2D(_MainTex, uv).rgb;
fixed grayValueA = greyScale(mainTex);
fixed3 rgbOut;
rgbOut = heatMap(uv.y);
rgbOut = heatMap(grayValueA * _Amount);
return fixed4(lerp(mainTex,rgbOut,_Enable),1);
}ENDCG
}
}
}
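For completeness, a rough sketch of hooking a shader like this up from C# (using Camera.SetReplacementShader; the component and shader names here are just assumptions for illustration):
using UnityEngine;
public class HeatVisionToggle : MonoBehaviour
{
    void OnEnable()
    {
        // An empty replacement tag means every rendered object uses this shader.
        GetComponent<Camera>().SetReplacementShader(Shader.Find("Hidden/HeatMap"), "");
    }
    void OnDisable()
    {
        GetComponent<Camera>().ResetReplacementShader();
    }
}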
Let's check it out once more:
First you should convert your image to grayscale.
Then try to recolorize it by some method, for example:
Using the Z-buffer
In computer graphics, z-buffering, also known as depth buffering, is the management of image depth coordinates in 3D graphics, usually done in hardware, sometimes in software.
https://en.wikipedia.org/wiki/Z-buffering
using UnityEngine;

[ExecuteInEditMode]
public class CameraScript : MonoBehaviour
{
    public Material mat;

    void Start()
    {
        // Ask the camera to generate a depth texture (_CameraDepthTexture).
        GetComponent<Camera>().depthTextureMode = DepthTextureMode.Depth;
    }

    void OnRenderImage(RenderTexture source, RenderTexture destination)
    {
        // Run the camera image through the heat-vision material.
        Graphics.Blit(source, destination, mat);
    }
}
Shader "Custom/HeatVision"
{
SubShader
{
Tags { "RenderType"="Opaque" }
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct v2f
{
float4 pos : SV_POSITION;
float4 screenuv : TEXCOORD1;
};
v2f vert (appdata_base v)
{
v2f o;
o.pos = UnityObjectToClipPos(v.vertex);
o.screenuv = ComputeScreenPos(o.pos);
return o;
}
sampler2D _CameraDepthTexture;
fixed greyScale(fixed3 rgb) {
return dot(rgb, fixed3(0.29, 0.60, 0.11));
}
fixed3 heatMap(fixed greyValue) {
fixed3 heat = fixed3(0,0,0);
heat.r = smoothstep(0.4, 0.8, greyValue);
half OutColorGreen = smoothstep(0.0, 0.7, greyValue);
half InColorGreen = smoothstep(1.0, 0.7, greyValue);
heat.g = min(InColorGreen,OutColorGreen);
float OutColorBlue = smoothstep(1.0, 0.0, greyValue);
float InColorBlue = smoothstep(0.0, 0.25, greyValue);
heat.b = min(OutColorBlue,InColorBlue);
return heat;
}
fixed4 frag (v2f i) : SV_Target
{
float2 uv = i.screenuv.xy / i.screenuv.w;
fixed3 mainTex = tex2D(_CameraDepthTexture, uv).rgb;
fixed grayValueA = greyScale(mainTex);
fixed3 rgbOut;
float Intensity = 15;
float depth = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, uv)*Intensity;
rgbOut = heatMap(uv.y);
rgbOut = heatMap(depth);
return float4(rgbOut.rgb,1);
}
ENDCG
}
}
}
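A side note on the depth sampling above: the raw value in _CameraDepthTexture is non-linear, which is why it is scaled by an arbitrary Intensity. If a more predictable gradient is wanted, UnityCG.cginc provides helpers to linearize it, roughly like this (an untested sketch):
float depth01 = Linear01Depth(SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, uv));
rgbOut = heatMap(depth01); // 0 at the near plane, 1 at the far plane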
I ported a plasma ball shader from Shadertoy to Unity as an image effect attached to the camera. It works fine in the Editor and in a Windows standalone build. It does not work on Android devices: it flashes blue and black images.
Here is what it looks like in Unity Editor and Windows Build:
Here is what it looks like on Android:
The ported Shader code:
Shader "Hidden/Plasma Space Ball Image Effect"
{
Properties
{
iChannel0("iChannel0", 2D) = "white" {}
//[MaterialToggle] _isToggled("isToggle", Float) = 0
}
SubShader
{
// No culling or depth
Cull Off ZWrite Off ZTest Always
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
v2f vert(appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;
return o;
}
sampler2D iChannel0;
//Ported from https://www.shadertoy.com/view/MstXzf
float3 hb(float2 pos, float t, float time, float2 rot, float size, sampler2D tex0)
{
float2 newUv = 0.2*(pos / (1.2 - t) + 0.5*time*rot);
//float texSample = texture(tex0, newUv).b;
float texSample = tex2D(tex0, newUv).b;
float uOff = 0.2*(texSample + 0.3*time); //lsf3RH
float2 starUV = newUv + float2(uOff, 0.0);
//return float3(0.3, 0.3, 1.0) + 1.3*texture(tex0, starUV).b;
return float3(0.3, 0.3, 1.0) + 1.3*tex2D(tex0, starUV).b;
}
float4 blob(float2 uv, float size, float time, sampler2D tex0)
{
float2 center = float2(0., 0.);
float2 pos = center - uv;
float t = length(pos);
float st = size - t;
float2 rot = 0.005*float2(sin(time / 16.), sin(time / 12.)); //MslGWN
float alpha = smoothstep(0.0, 0.2*size, st);
float3 col = hb(pos, t, time, rot, size, tex0);
float a1 = smoothstep(-1.4, -1.0, -col.b);
col = lerp(col, hb(pos, t, -time, -rot, size, tex0), a1);
col += 0.8*exp(-12.*abs(t - 0.8*size) / size);
float a2 = smoothstep(-1.4, -1.0, -col.b);
alpha -= a2;
//float crosshair = float((abs(pos.x) < 0.005 && abs(pos.y) < 0.15) || (abs(pos.y) < 0.005&&abs(pos.x) < 0.15));
//return float4(col, alpha) + crosshair;
return float4(col, alpha);
}
float4 main_(float2 uv, float size)
{
return blob(uv, size, _Time.y, iChannel0);
}
fixed4 frag(v2f i) : SV_Target
{
float4 fragColor = 0;
float2 fragCoord = i.vertex.xy;
///---------------------------------------------------
float2 uv = fragCoord.xy / _ScreenParams.xy;
float2 cr = uv*2. - 1.;
cr.x *= _ScreenParams.x / _ScreenParams.y;
//late addition to elaborate background motion, could be reused later on
float2 rot = 0.5*float2(sin(_Time.y / 16.), sin(_Time.y / 12.));
float4 ball = clamp(main_(cr, sin(_Time.y)*0.05 + 0.5 + 0.5), 0., 1.);
//float3 bg = float3(0.7, 0.7, 1.0)*texture(iChannel0, uv + rot + 0.1*ball.rb).b;
float3 bg = float3(0.7, 0.7, 1.0)*tex2D(iChannel0, uv + rot + 0.1*ball.rb).b;
//simulated gl blend
fragColor = float4(lerp(bg, ball.rgb, ball.a), 1.0);
//fragColor = lerp(fragColor,tex2D(iChannel0, i.uv).rgba,.5);
return fragColor;
}
ENDCG
}
}
}
You can find the image that is used for the iChannel0 input slot in the shader above here.
Things I've tried:
Adding the shader to the Graphics Settings so that Unity will include it during the build process.
Disabling Auto Graphics API and trying OpenGLES2 and OpenGLES3.
Checking the log with Android Studio. No errors or warnings at all.
None of these solved the problem and I've run out of things to try.
Software and Device Info if that helps:
Unity 5.6.0f3
Android 4.4.2
This is for learning and educational purposes, as I am studying the GLSL, HLSL, and Cg/ShaderLab shading languages. I just want to know why the ported shader is not working as expected on Android devices.
Why is it flashing blue and black images on Android?
You need to use the VPOS semantic for positions in the fragment shader for OpenGLES2.
From Unity docs:
A fragment shader can receive position of the pixel being rendered as
a special VPOS semantic. This feature only exists starting with shader
model 3.0, so the shader needs to have the #pragma target 3.0
compilation directive.
So to get screen space positions:
// note: no SV_POSITION in this struct
struct v2f {
    float2 uv : TEXCOORD0;
};

v2f vert (
    float4 vertex : POSITION,        // vertex position input
    float2 uv : TEXCOORD0,           // texture coordinate input
    out float4 outpos : SV_POSITION  // clip space position output
)
{
    v2f o;
    o.uv = uv;
    outpos = UnityObjectToClipPos(vertex);
    return o;
}

fixed4 frag (v2f i, UNITY_VPOS_TYPE screenPos : VPOS) : SV_Target
{
    // screenPos.xy will contain pixel integer coordinates.
    float4 fragColor = 0;
    float2 fragCoord = screenPos;
But you already pass in uvs so maybe you can use those?
float2 uv = i.uv;
It turns out I was wrong: you don't get clip-space positions in the fragment shader in OpenGLES2, you just get 0. (Maybe someone can explain this?)
I made a small test shader:
CGPROGRAM
#pragma vertex vert
#pragma fragment frag

float4 vert (float4 vertex : POSITION) : SV_Position
{
    return UnityObjectToClipPos(vertex);
}

fixed4 frag (float4 screenPos : SV_Position) : SV_Target
{
    float uvx = screenPos.x / _ScreenParams.x;
    return float4(uvx, 0., 0., 1.);
}
ENDCG
and the line float uvx = screenPos.x/_ScreenParams.x; gets compiled as:
tmpvar_2.x = (0.0 / _ScreenParams.x);            // OpenGLES2
u_xlat0 = gl_FragCoord.x / _ScreenParams.x;      // OpenGLES3
But if you use the VPOS semantic:
fixed4 frag (float4 screenPos : VPOS) : SV_Target
the same line gets compiled as:
tmpvar_2.x = (gl_FragCoord.x / _ScreenParams.x); // OpenGLES2
u_xlat0 = gl_FragCoord.x / _ScreenParams.x;      // OpenGLES3
So for OpenGLES2 it looks like you need to use the VPOS semantic to get positions in screen space in the fragment shader.
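Applied to the plasma shader above, the fix would look roughly like this (a sketch: the SV_POSITION output has to move out of the v2f struct as in the docs example, and #pragma target 3.0 is needed for VPOS):
struct v2f { float2 uv : TEXCOORD0; }; // note: no SV_POSITION in here
v2f vert(appdata v, out float4 outpos : SV_POSITION)
{
    v2f o;
    o.uv = v.uv;
    outpos = UnityObjectToClipPos(v.vertex);
    return o;
}
fixed4 frag(v2f i, UNITY_VPOS_TYPE screenPos : VPOS) : SV_Target
{
    float4 fragColor = 0;
    float2 fragCoord = screenPos.xy; // pixel coordinates, valid on GLES2 too
    // ...rest of the original frag body unchanged
}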
I'm trying to make a single shader that checks whether it has collided with a mesh (with the same shader); if it has, then it should not render the collided part at all, for both meshes.
I'm trying to get this effect:
I have used a cross-section shader to achieve this, but it's not what I want:
This is the shader code:
// Upgrade NOTE: replaced 'glstate.matrix.invtrans.modelview[0]' with 'UNITY_MATRIX_IT_MV'
// Upgrade NOTE: replaced 'glstate.matrix.mvp' with 'UNITY_MATRIX_MVP'
// 2 Pass Edition
Shader "cross_section_v004a"
{
    Properties
    {
        section_depth ("section depth (x, y, z, depth)", vector) = (0,0,0,0.15)
        section_color ("section color", color) = (0.5,0.1, 0.1, 1)
        color_map ("color map", 2D) = "white" {}
    }
    SubShader
    {
        Pass
        {
            CULL OFF
            CGPROGRAM //--------------
            //#pragma target 3.0
            #pragma vertex vertex_shader
            #pragma fragment fragment_shader
            #include "UnityCG.cginc"

            uniform float4 section_depth;
            uniform float4 section_color;
            uniform sampler2D color_map;

            float4x4 rotate(float3 r)
            {
                float3 c, s;
                sincos(r.x, s.x, c.x);
                sincos(r.y, s.y, c.y);
                sincos(r.z, s.z, c.z);
                return float4x4( c.y*c.z, -s.z, s.y, 0,
                                 s.z, c.x*c.z, -s.x, 0,
                                 -s.y, s.x, c.x*c.y, 0,
                                 0, 0, 0, 1 );
            }

            struct a2v
            {
                float4 vertex : POSITION;
                float4 color : COLOR;
                float2 texcoord : TEXCOORD;
                float3 normal : NORMAL;
            };

            struct v2f
            {
                float4 position : POSITION;
                float2 texcoord : TEXCOORD0;
                float4 normal : TEXCOORD1;
                float4 vertex : TEXCOORD2;
                float4 mask : TEXCOORD3;
            };

            v2f vertex_shader( a2v IN )
            {
                v2f OUT;
                float4x4 r = rotate(radians(section_depth.xyz) +_SinTime.xyz);
                float4 c = float4(IN.vertex.xyz,1);
                OUT.mask = mul(r, c);
                OUT.position = mul(UNITY_MATRIX_MVP, IN.vertex);
                OUT.texcoord = IN.texcoord;
                r *= float4x4( 1,-1,-1, 0,
                               -1, 1,-1, 0,
                               -1,-1, 1, 0,
                               0, 0, 0, 1 ); // the section_depth.xyz need to be inverted !
                OUT.normal = mul(r, float4(1,0,0,1));
                OUT.vertex = IN.vertex;
                return OUT;
            }

            void fragment_shader( v2f IN, out float4 finalcolor : COLOR)
            {
                if(IN.mask.x > section_depth.w)
                    discard;
                float3 N = IN.normal.xyz;
                N = mul(UNITY_MATRIX_IT_MV, float4(N, 1));
                //float diffuse = saturate(dot(glstate.light[0].position, N));
                finalcolor = float4(0,0,0,1);
                finalcolor.xyz = section_color *(0.6 +0.4);
            }
            ENDCG //--------------
        } // Pass
        //---------------------------------------------------------------------------------
        Pass
        {
            CULL BACK
            CGPROGRAM //--------------
            #pragma vertex vertex_shader
            #pragma fragment fragment_shader
            #include "UnityCG.cginc"

            uniform float4 section_depth;
            uniform float4 section_color;
            uniform sampler2D color_map;

            float4x4 rotate(float3 r)
            {
                float3 c, s;
                sincos(r.x, s.x, c.x);
                sincos(r.y, s.y, c.y);
                sincos(r.z, s.z, c.z);
                return float4x4( c.y*c.z, -s.z, s.y, 0,
                                 s.z, c.x*c.z, -s.x, 0,
                                 -s.y, s.x, c.x*c.y, 0,
                                 0, 0, 0, 1 );
            }

            struct a2v
            {
                float4 vertex : POSITION;
                float4 color : COLOR;
                float2 texcoord : TEXCOORD;
                float3 normal : NORMAL;
            };

            struct v2f
            {
                float4 position : POSITION;
                float2 texcoord : TEXCOORD0;
                float3 normal : TEXCOORD1;
                float4 vertex : TEXCOORD2;
                float4 mask : TEXCOORD3;
            };

            v2f vertex_shader( a2v IN )
            {
                v2f OUT;
                float4x4 r = rotate(radians(section_depth.xyz) +_SinTime.xyz);
                float4 c = float4(IN.vertex.xyz,1);
                OUT.mask = mul(r, c);
                OUT.position = mul(UNITY_MATRIX_MVP, IN.vertex);
                OUT.texcoord = IN.texcoord;
                OUT.normal = IN.normal;
                OUT.vertex = IN.vertex;
                return OUT;
            }

            void fragment_shader( v2f IN, out float4 finalcolor : COLOR)
            {
                if(IN.mask.x > section_depth.w)
                    discard;
                float3 N = IN.normal;
                N = mul(UNITY_MATRIX_IT_MV, float4(N, 1));
                //float diffuse = saturate(dot(glstate.light[0].position, N));
                finalcolor = float4(0,0,0,1);
                finalcolor.xyz = tex2D(color_map, IN.texcoord).xyz *(0.6 +0.4);
            }
            ENDCG //--------------
        } // Pass
    } // SubShader
} // Shader
However, the rest of the object should stay visible. I mean that I don't want the parts that are NOT intersecting to become invisible (when viewed through the invisible parts), so stencil shaders won't do the trick. Something like this:
I want to know the correct way to approach this problem. I'm fairly new to shader programming and I don't know how to solve this issue. Any help would be appreciated.
Thanks!
I need to manipulate the color of a particular area in a shader. The area is hexagonal and determined by outside sources at runtime.
I used a hexagonal texture on the shader and I need to change the colors of some of the cells individually.
I'm using a shader from the Unity3D Wiki, but I don't have much experience with shaders.
If someone could point me to a source from which I can figure this out, I would be glad.
Here is the shader I'm using, and the link:
Shader "Custom/Shield"
{
Properties
{
_Color("_Color", Color) = (0.0,1.0,0.0,1.0)
_Inside("_Inside", Range(0.0,2.0) ) = 0.0
_Rim("_Rim", Range(0.0,1.0) ) = 1.2
_Texture("_Texture", 2D) = "white" {}
_Speed("_Speed", Range(0.5,5.0) ) = 0.5
_Tile("_Tile", Range(1.0,10.0) ) = 5.0
_Strength("_Strength", Range(0.0,5.0) ) = 1.5
}
SubShader
{
Tags
{
"Queue"="Transparent"
"IgnoreProjector"="True"
"RenderType"="Transparent"
}
Cull Back
ZWrite On
ZTest LEqual
CGPROGRAM
#pragma surface surf BlinnPhongEditor alpha vertex:vert
//#pragma target 3.0
fixed4 _Color;
sampler2D _CameraDepthTexture;
fixed _Inside;
fixed _Rim;
sampler2D _Texture;
fixed _Speed;
fixed _Tile;
fixed _Strength;
struct EditorSurfaceOutput
{
half3 Albedo;
half3 Normal;
half3 Emission;
half3 Gloss;
half Specular;
half Alpha;
};
inline half4 LightingBlinnPhongEditor_PrePass (EditorSurfaceOutput s, half4 light)
{
half3 spec = light.a * s.Gloss;
half4 c;
c.rgb = (s.Albedo * light.rgb + light.rgb * spec);
c.a = s.Alpha + Luminance(spec);
return c;
}
inline half4 LightingBlinnPhongEditor (EditorSurfaceOutput s, half3 lightDir, half3 viewDir, half atten)
{
viewDir = normalize(viewDir);
half3 h = normalize (lightDir + viewDir);
half diff = max (0, dot (s.Normal, lightDir));
float nh = max (0, dot (s.Normal, h));
float3 spec = pow (nh, s.Specular*128.0) * s.Gloss;
half4 res;
res.rgb = _LightColor0.rgb * (diff * atten * 2.0);
res.w = spec * Luminance (_LightColor0.rgb);
return LightingBlinnPhongEditor_PrePass( s, res );
}
struct Input
{
float4 screenPos;
float3 viewDir;
float2 uv_Texture;
};
void vert (inout appdata_full v, out Input o)
{
UNITY_INITIALIZE_OUTPUT(Input,o);
}
void surf (Input IN, inout EditorSurfaceOutput o)
{
o.Albedo = fixed3(0.0,0.0,0.0);
o.Normal = fixed3(0.0,0.0,1.0);
o.Emission = 0.0;
o.Gloss = 0.0;
o.Specular = 0.0;
o.Alpha = 1.0;
float4 ScreenDepthDiff0= LinearEyeDepth (tex2Dproj(_CameraDepthTexture, UNITY_PROJ_COORD(IN.screenPos)).r) - IN.screenPos.z;
float4 Saturate0=fixed4(0.3,0.3,0.3,1.0);//
float4 Fresnel0_1_NoInput = fixed4(0,0,1,1);
float dNorm = 1.0 - dot(normalize(float4(IN.viewDir, 1.0).xyz), normalize(Fresnel0_1_NoInput.xyz) );
float4 Fresnel0 = float4(dNorm,dNorm,dNorm,dNorm);
float4 Step0=step(Fresnel0,float4( 1.0, 1.0, 1.0, 1.0 ));
float4 Clamp0=clamp(Step0,_Inside.xxxx,float4( 1.0, 1.0, 1.0, 1.0 ));
float4 Pow0=pow(Fresnel0,(_Rim).xxxx);
float4 Multiply5=_Time * _Speed.xxxx;
float4 UV_Pan0=float4((IN.uv_Texture.xyxy).x,(IN.uv_Texture.xyxy).y + Multiply5.x,(IN.uv_Texture.xyxy).z,(IN.uv_Texture.xyxy).w);
float4 Multiply1=UV_Pan0 * _Tile.xxxx;
float4 Tex2D0=tex2D(_Texture,Multiply1.xy);
float4 Multiply2=Tex2D0 * _Strength.xxxx;
float4 Multiply0=Pow0 * Multiply2;
float4 Multiply3=Clamp0 * Multiply0;
float4 Multiply4=Saturate0 * Multiply3;
o.Emission = Multiply3.xyz * _Color.rgb;
o.Alpha = Multiply3.w * _Color.a;
}
ENDCG
}
Fallback "Diffuse"
}
http://wiki.unity3d.com/index.php/Shield
Shader programs can actually have more parameters (like matrices, vectors and floats) that are set on the material from code at runtime, but if they are not part of the Properties block then their values will not be saved. This is mostly useful for values that are completely script code-driven (using Material.SetFloat and similar functions).
Source: https://docs.unity3d.com/Manual/SL-Properties.html
It would seem that you have to use unique materials with different colors. You can then dynamically adjust each material's color with Material.SetColor().
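A minimal sketch of that (the "_Color" property name matches the shield shader above; the component placement is just an example):
using UnityEngine;
public class CellColor : MonoBehaviour
{
    void Start()
    {
        // Accessing .material clones the shared material, so this renderer
        // gets its own unique instance that can be tinted independently.
        Material mat = GetComponent<Renderer>().material;
        mat.SetColor("_Color", Color.red);
    }
}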