I am trying to create a GameObject that, when active, reverses the colors of anything behind it. Ideally, this GameObject expands from its centre point to a certain radius when it appears/becomes active.
So imagine a circle whose scale increases gradually from x=0, y=0 to x=5, y=5 over 1 or 2 seconds and reverses the colors of anything behind it. It doesn't exactly matter how long it takes or how big the actual circle is; that's just an example of the idea. I tried creating a GameObject with a mask and a shader I found online, but it didn't work at all, and I still wasn't even sure how to animate the expansion.
Does anyone have any idea on how I could make this dream effect of mine a reality?
Here is a shader that does nothing except invert the color.
The important parts are Alpha:Blend and Blend OneMinusDstColor Zero: with that blend mode the output is source * (1 - destination), so with the default white color every pixel becomes one minus the color behind it, i.e. inverted.
Shader "Custom/Invert"
{
Properties
{
_Color ("Color", Color) = (1,1,1,1)
_MainTex ("Albedo (RGB)", 2D) = "white" {}
}
SubShader
{
Tags { "RenderType"="Transparent" "Queue"="Transparent" }
LOD 200
Blend OneMinusDstColor Zero
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma target 3.0 Alpha:Blend
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
fixed4 _Color;
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = float2(0,0);
return o;
}
fixed4 frag (v2f i) : SV_Target
{
return _Color;
}
ENDCG
}
}
FallBack "Diffuse"
}
You can put this on any sprite or 3D object to get this effect:
As for the animation, King's answer is nice and simple. Alternatively you could animate the scale by code:
public class Scale : MonoBehaviour
{
void Update ()
{
var scale = Mathf.Sin(Time.timeSinceLevelLoad / 4f) * 20;
this.transform.localScale = new Vector3(scale, scale, 1);
}
}
This particular code will make the sprite 'pulse'. Like this:
I think you should be able to make your own scaling script from this example.
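If you want a one-shot expansion instead of a pulse, here is a minimal sketch of what that could look like (the target scale and duration are placeholder values matching the question; attach it to the circle and it grows from zero whenever the object is enabled):
using UnityEngine;

public class ExpandOnEnable : MonoBehaviour
{
    // Placeholder values: grow to a 5x5 circle over 1.5 seconds, as in the question.
    public Vector3 targetScale = new Vector3(5f, 5f, 1f);
    public float duration = 1.5f;

    float elapsed;

    void OnEnable()
    {
        // Start collapsed every time the GameObject becomes active.
        elapsed = 0f;
        transform.localScale = Vector3.zero;
    }

    void Update()
    {
        if (elapsed >= duration) return;
        elapsed += Time.deltaTime;
        float t = Mathf.Clamp01(elapsed / duration);
        transform.localScale = Vector3.Lerp(Vector3.zero, targetScale, t);
    }
}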
Select your GameObject->Window->Animation->Animation->Create->Save->Add Property->Transform->Scale. At your 0:00 keyframe set your scale for x,y,z to 1. Click over to your 1:00 keyframe and set your scale for x,y,z to 2. Press play and your animation should loop from scale of 1 to 2.
I don't know much about shaders :( so any help would be appreciated.
I am using this free 2D water asset, which contains a water shader. I want to edit that shader so it looks more like 2.5D or 3D.
Currently, it looks like this:
I want to make it look like this:
i.e. adding a foam-type thing on top and making it a little reflective.
Here is the shader used in the asset to make the 2D water:
Shader "Water2D/Metaballs_Simple" {
Properties {
_MainTex ("Texture", 2D) = "white" { }
_Color ("Main color", Color) = (1,1,1,1)
_Cutoff ("Alpha cutoff", Range(0,1)) = 0.5
_Stroke ("Stroke alpha", Range(0,1)) = 0.1
_StrokeColor ("Stroke color", Color) = (1,1,1,1)
}
/// <summary>
/// Multiple metaball shader.
/// </summary>
SubShader {
Tags {"Queue"="AlphaTest" "IgnoreProjector"="True" "RenderType"="TransparentCutout"}
GrabPass{}
Pass {
Blend SrcAlpha OneMinusSrcAlpha
// Blend One One // Additive
// Blend One OneMinusSrcAlpha
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
float4 _Color;
sampler2D _MainTex;
fixed _Cutoff;
fixed _Stroke;
half4 _StrokeColor;
float2 _screenPos;
float4 _CameraDepthTexture_TexelSize;
struct v2f {
float4 pos : SV_POSITION;
float2 uv : TEXCOORD0;
};
float4 _MainTex_ST;
v2f vert (appdata_base v){
v2f o;
o.pos = UnityObjectToClipPos (v.vertex);
o.uv = TRANSFORM_TEX (v.texcoord, _MainTex);
return o;
};
half4 frag (v2f i) : COLOR{
half4 texcol= tex2D (_MainTex, i.uv);
//half4 finalColor = texcol;
clip(texcol.a - _Cutoff);
if (texcol.a < _Stroke) {
texcol = _StrokeColor;
} else {
texcol = _Color;
}
return texcol;
}
ENDCG
}
}
Fallback "VertexLit"
}
Judging by your shader, it appears as if the fluid has a texture containing an alpha gradient based on the distance to the edge. If this is anything similar to a distance field, then you can use it to reconstruct a screen-space normal based on the derivatives.
float3 normal;
normal.x = ddx(texcol.a);
normal.y = ddy(texcol.a);
normal.z = sqrt(1 - normal.x*normal.x - normal.y * normal.y);
Then, you can use this value to calculate some basic Blinn-Phong specular lighting. This involves using the sum of the vector to the light and the vector to the camera to get a half-way vector.
Since our normal is in camera space, we might want to do all other calculations in camera space too. Ideally, we want to do as many calculations as we can inside the vertex shader and then pass them over to the fragment shader:
struct v2f {
float4 pos : SV_POSITION;
float2 uv : TEXCOORD0;
float3 viewDir : TEXCOORD1;
float3 lightDir : TEXCOORD2;
};
float4 _MainTex_ST;
v2f vert (appdata_base v){
v2f o;
o.pos = UnityObjectToClipPos (v.vertex);
o.viewDir = mul((float3x3)UNITY_MATRIX_MV, ObjSpaceViewDir(v.vertex)); // object space -> view (camera) space
o.lightDir = mul((float3x3)UNITY_MATRIX_MV, ObjSpaceLightDir(v.vertex));
o.uv = TRANSFORM_TEX (v.texcoord, _MainTex);
return o;
};
I'm assuming that ObjSpaceLightDir works for 2D lights, but I'm not sure.
Inside the fragment shader, we can now get the specular highlights:
float3 lightDir = normalize(i.lightDir);
float3 viewDir = normalize(i.viewDir);
float3 halfDir = normalize(lightDir + viewDir);
float spec = pow(saturate(dot(normal, halfDir)), _Glossiness * 128);
return saturate(_BaseColor + spec);
You can also add some subtle lambert shading to create shadows underneath:
float diff = saturate(dot(normal, lightDir));
// 20% diffuse contribution, because lots of light passes through the liquid.
// Can be tweaked based on artistic needs.
return saturate(_BaseColor * (0.8 + 0.2 * diff) + spec);
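Note that _BaseColor and _Glossiness do not exist in the original shader; they are assumed here as new material properties. A minimal sketch of the declarations you would add (names and default values are just placeholders):
// In the Properties block:
_BaseColor ("Base color", Color) = (0.2, 0.5, 0.8, 1)
_Glossiness ("Glossiness", Range(0, 1)) = 0.5
// In the CGPROGRAM section, next to the other uniforms:
float4 _BaseColor;
half _Glossiness;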
Be aware though that this will not properly give the illusion of thickness that your reference has. For this, you need to simulate the thickness of the fluid into a heightmap and generate normals based on that. You can break up the shape of the specular by adding some procedural noise to it, though I haven't tried this. A big challenge when writing shaders is that the GPU doesn't know anything about the context of the pixel other than what you give to it, so things like the distance to the nearest edge have to be pre-computed and fed into the material as textures. I'm honestly not sure how they manage to produce the foam on top - it might be a part of the water texture or a separate sprite rendered in the background.
So I am very new to shader programming (basically just started today) and I got this code from a tutorial on YouTube, which works great. It just checks whether the current pixel is on the edge of a texture and, if so, replaces it with a plain color. I want to be able to set the transparency of the color I'm returning, but it does not seem to be working.
Shader "Custom/OutlineShader"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_Color("Color", Color) = (1, 1, 1, 1)
_AlphaOffset("Transparency", Range(0,1)) = 1
}
SubShader
{
Tags{ "Queue"="Transparent" "RenderType"="Opaque"}
Cull Off ZWrite Off ZTest Always
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag Lambert alpha
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
fixed4 _Color;
sampler2D _MainTex;
float4 _MainTexelSize;
float _AlphaOffset;
v2f vert (appdata v)
{
v2f OUT;
OUT.vertex = UnityObjectToClipPos(v.vertex);
OUT.uv = v.uv;
return OUT;
}
fixed4 frag (v2f i) : SV_Target
{
fixed4 col = tex2D(_MainTex, i.uv);
col.rgb *= col.a;
fixed4 outlineColor = _Color;
// This is where I want the shader to be transparent or not based on the _AlphaOffset Value
outlineColor.a *= ceil(col.a) * _AlphaOffset;
// This code just checks whether the current pixel is on the edge of the texture
fixed upAlpha = tex2D(_MainTex, i.uv + fixed2(0, _MainTexelSize.y)).a;
fixed downAlpha = tex2D(_MainTex, i.uv - fixed2(0, _MainTexelSize.y)).a;
fixed leftAlpha = tex2D(_MainTex, i.uv - fixed2(_MainTexelSize.x, 0)).a;
fixed rightAlpha = tex2D(_MainTex, i.uv + fixed2(_MainTexelSize.x, 0)).a;
// If it's on the edge, return the color (+ alpha) else, just return the same pixel
return lerp(outlineColor, col, ceil(upAlpha * downAlpha * leftAlpha * rightAlpha));
}
ENDCG
}
}
}
I would like this line:
outlineColor.a *= ceil(col.a) * _AlphaOffset;
to set the alpha of the pixel I'm returning.
Thanks!
There are primarily two things wrong here. First, you have set your RenderType to "Opaque", which expectedly makes it render as non-transparent; this should instead be set to "Transparent". Second, you need to specify a blend mode to determine how the color from this object blends with what's already rendered to the buffer. From the Unity manual on blending:
Blend SrcFactor DstFactor: Configure and enable blending. The
generated color is multiplied by the SrcFactor. The color already on
screen is multiplied by DstFactor and the two are added together.
For regular alpha blending, add this statement inside your subshader:
Blend SrcAlpha OneMinusSrcAlpha
For additive blending, which results in a glow-like effect, use this:
Blend One One
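Putting both changes together, the start of the SubShader from the question would look roughly like this (a sketch for regular alpha blending; the Pass content stays exactly as it is):
SubShader
{
    // Render in the transparent queue and mark the object as transparent
    Tags { "Queue"="Transparent" "RenderType"="Transparent" }
    Cull Off ZWrite Off ZTest Always
    // Blend the shader output with what is already on screen, using its alpha
    Blend SrcAlpha OneMinusSrcAlpha
    Pass
    {
        // ... unchanged CGPROGRAM from the question ...
    }
}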
I have a light Projector with a material attached to it and a script that projects a caustics effect underwater. In the script's Inspector I have multiple caustics textures generated by an external tool, and the script iterates over them, swapping the texture on the projector's material.
How can I make the shader behave more like light and be invisible in shadows?
I have no experience with shaders, and the shader is not mine.
Shader "Projector/Caustics" {
Properties {
_Color ("Color", Color) = (1,1,1,0)
[NoScaleOffset]_MainTex ("Texture", 2D) = "black" { }
_Size ("Grid Size", Float) = 10
_Height ("Water Height", Float) = 2.0
_DepthBlend ("Depth Blend", Float) = 10.0
_EdgeBlend ("Edge Blend", Range (0, 100)) = 0.5
_Multiply ("Multiply", Range (1, 2)) = 1.0
_LOD ("LOD Bias", Range (1, 1000)) = 100
}
Subshader {
Tags { "RenderType"="Transparent" "Queue"="Transparent+100" }
Pass {
ZWrite Off
Offset -1, -1
//Blend OneMinusDstColor One //- Soft Additive
//Blend One One //- Linear Dodge
Blend DstColor One
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma multi_compile_fog
#include "UnityCG.cginc"
struct v2f {
float4 pos : SV_POSITION;
float2 uv : TEXCOORD0;
float3 wPos : TEXCOORD1; // added for height comparisons.
UNITY_FOG_COORDS(2)
};
uniform sampler2D _MainTex;
float4 _MainTex_ST;
float4 _Color;
float4x4 unity_Projector;
float _Size;
float _Height;
float _DepthBlend;
float _EdgeBlend;
float _Multiply;
float _LOD;
float dist;
v2f vert (appdata_tan v) {
v2f o;
o.pos = UnityObjectToClipPos (v.vertex);
o.wPos = mul(unity_ObjectToWorld, v.vertex).xyz;
o.uv = TRANSFORM_TEX (mul (unity_Projector, v.vertex).xy, _MainTex);
UNITY_TRANSFER_FOG(o,o.pos);
return o;
}
fixed4 frag (v2f i) : COLOR {
dist = distance(_WorldSpaceCameraPos, i.wPos);
fixed4 c = tex2Dlod (_MainTex, float4(fmod (i.uv, 1 / _Size)*_Size,0,dist/_LOD)); // project tiled texture, set lod.
if (i.wPos.y<_Height)
c = c-(i.wPos.y-_Height)/-_DepthBlend*2;
else
c = lerp(c,0,(i.wPos.y-_Height)/_EdgeBlend);
c = saturate(c);
UNITY_APPLY_FOG_COLOR(i.fogCoord, c, fixed4(0,0,0,0));
return c * _Color *_Multiply ; // apply final color
}
ENDCG
}
}
}
Unfortunately, you can't have shadows with projectors, which is what you would need for this. Unity also doesn't support shadows for transparent objects.
There are three options for achieving this effect:
Use deferred decals, which are only supported in deferred mode.
Use a screen-space effect.
Create a custom terrain shader which displays the caustics as part of the terrain rendering based on the world space Y position. You would also need a similar shader modification to all other objects which are to receive caustics.
There are some examples of deferred decals in this post. Unfortunately, without any knowledge of shader programming, it is not that straightforward to implement. There are some water solutions for Unity which have this feature built in, though; for instance, LUX uses screen-space caustics, which is pretty cool.
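As a rough illustration of the third option, a fragment shader on a receiving object could blend a caustics texture in only below the water line, based on the world-space Y position. This is just a sketch; _CausticsTex, _WaterHeight and _CausticsStrength are assumed new properties, not part of the asset:
// Assumed uniforms: sampler2D _CausticsTex; float _WaterHeight; float _CausticsStrength;
// i.wPos is the interpolated world position, passed from the vertex shader
// in the same way as in the projector shader above.
fixed4 frag (v2f i) : SV_Target
{
    fixed4 c = tex2D(_MainTex, i.uv);
    // Sample the caustics in world-space XZ so they tile across the surface.
    fixed3 caustics = tex2D(_CausticsTex, i.wPos.xz * 0.1).rgb;
    // Only apply caustics below the water height, fading out near the surface.
    float underwater = saturate((_WaterHeight - i.wPos.y) * 2.0);
    c.rgb += caustics * _CausticsStrength * underwater;
    return c;
}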
I am trying to make a sphere emit light by using one of Unity's own materials, but no light is being created or emitted. What is wrong?
You can modify the emission in the material, without any special shader.
You can then switch off all lights in the scene and you will still be able to see this GameObject.
You can test it by adding a script that makes the ambient light dark grey:
void Start () {
Color myColor = new Color32( 0x0B, 0x0A, 0x0A, 0xFF );
RenderSettings.ambientLight = myColor;
}
The result would be something like this: a cube which emits yellow light and a cube which is almost in darkness.
Now, if you want to illuminate other elements in the scene (which I am still not sure you do), you should mark the object as Static in the top right corner of the Inspector.
And the result would be:
If this solution still doesn't work for you, you can try with shaders:
Create a new Shader, name it Glow and replace its default code with this:
Shader "Glow" {
Properties {
_MainTex ("Texture", 2D) = "white" {}
_Color ("Color", Color) = (1,1,1,1)
_Glow ("Intensity", Range(0, 3)) = 1
}
SubShader {
Tags { "Queue" = "Transparent" "IgnoreProjector" = "True" "RenderType" = "Transparent" }
LOD 100
Cull Off
ZWrite On
Blend SrcAlpha OneMinusSrcAlpha
Pass {
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
sampler2D _MainTex;
half4 _MainTex_ST;
fixed4 _Color;
half _Glow;
struct vertIn {
float4 pos : POSITION;
half2 tex : TEXCOORD0;
};
struct v2f {
float4 pos : SV_POSITION;
half2 tex : TEXCOORD0;
};
v2f vert (vertIn v) {
v2f o;
o.pos = mul(UNITY_MATRIX_MVP, v.pos);
o.tex = v.tex * _MainTex_ST.xy + _MainTex_ST.zw;
return o;
}
fixed4 frag (v2f f) : SV_Target {
fixed4 col = tex2D(_MainTex, f.tex);
col *= _Color;
col *= _Glow;
return col;
}
ENDCG
}
}
}
Create a Material.
Change its color to what you want and select Glow as the shader type (the one you created in the first step). Also choose the intensity you prefer.
The result would be:
With this version, you cannot illuminate nearby objects.
Finally, to make a better glowing effect you can add some effects to the camera.
Go to Assets -> Import Package -> Effects
Select the Main Camera and Add Component: Image Effects -> Bloom and Glow -> Bloom Optimized
You will have the following component in your camera:
And the result of adding this effect to the camera would be:
The problem was solved just by marking all objects in the scene as Static!
I'm learning how to make shaders in Unity and I have a question about shaders applied to sprites.
I made a shader which modifies a sprite to create a distortion effect:
But, as you can see in the picture linked above, my sprite is cut off at the boundaries of the sprite.
Is there any way to avoid this cut and draw the missing part?
My shader :
Shader "Shader101/testShader"
{
Properties
{
_MainTex( "Texture", 2D ) = "white" {}
_DisplaceTex( "Displacement Texture", 2D ) = "white" {}
_Power("Power", Range(-0.4, 0.4)) = 0
}
SubShader
{
Tags
{
"Queue" = "Transparent"
}
Pass
{
Blend SrcAlpha OneMinusSrcAlpha
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float4 vertex : SV_POSITION;
float2 uv : TEXCOORD0;
};
v2f vert (appdata v)
{
v2f o;
o.vertex = mul( UNITY_MATRIX_MVP, v.vertex);
o.uv = v.uv;
return o;
}
sampler2D _MainTex;
sampler2D _DisplaceTex;
float _Power;
fixed4 frag (v2f i) : SV_Target
{
float2 distuv = float2( 0, i.uv.y + _Time.x * 2 );
float2 wave = tex2D( _DisplaceTex, distuv * 5 ).xy;
wave = (( wave * 2 ) - 1) * _Power - 0.25;
float4 color = tex2D( _MainTex, i.uv + float2(wave.y, 0) );
return color;
}
ENDCG
}
}
}
Sorry for my poor English :|
Thank you
Take a look at your scene view with shaded wireframe mode on.
Here you should see that your shader does not change the vertex positions, but only changes the rendered pixel colors in the fragment shader. Therefore it cannot draw outside of the mesh.
The easiest way to fix it would be to modify the sprite texture with more transparent areas around your cat. (In Photoshop > Image > Canvas Size ...)
This is still a high search result without a satisfactory answer.
Two settings in the sprite's import settings may help:
Extrude Edges - adds padding around the sprite.
Full Rect - required if you manually add extra space around a sprite; it causes the generated mesh to use all the empty space instead of just the opaque areas.