Portal shader in Unity - unity3d

I'm trying to implement portals in Unity. The way I'm doing it is having a camera mimic the player's position and rotation in the other portals local space.
My problem is, that whatever is behind the portal gets rendered on this camera, so if I move back from the portal, some other stuff will clip in. I've made a shader that's supposed to add a clipping plane, but it seems it's either clipping everything or nothing:
Shader "Hidden/ClipPlaneShader"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_PlaneNormal("PlaneNormal",Vector) = (0,1,0,0)
_PlanePosition("PlanePosition",Vector) = (0,0.5,0,0)
}
SubShader
{
// No culling or depth
Cull Off ZWrite Off ZTest Always
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
float clip : SV_ClipDistance0;
};
float4 _PlaneNormal;
float4 _PlanePosition;
v2f vert (appdata v)
{
float4 worldPos = mul(unity_ObjectToWorld, v.vertex);
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;
o.clip = dot(worldPos - _PlanePosition, _PlaneNormal);
return o;
}
sampler2D _MainTex;
fixed4 frag (v2f i) : SV_Target
{
fixed4 col = tex2D(_MainTex, i.uv);
return col;
}
ENDCG
}
}
}
I'm adding it to the camera via this script, but I don't know if that's the best way to do it:
private void OnRenderImage(RenderTexture src, RenderTexture dest)
{
_material.SetVector("_PlanePosition", planePosition);
_material.SetVector("_PlaneNormal", planeNormal);
Graphics.Blit(src, dest, _material);
}
Can anyone see what I'm doing wrong?
EDIT:
The Portal guys called this problem 'banana juice' and they explain it in this video: https://youtu.be/ivyseNMVt-4?t=1064
I've tried drawning a nice picture here: https://imgur.com/9Dc57Pm
The eye on the left is the player, and the eye on the right is my portal camera. The portal camera should only render what's beneath the yellow line (the clip plane), but it also renders the box, so it ends up looking weird for the player.
EDIT^2: i think I've found a problem in using the OnRenderImage() function to apply my material. That's a 2D texture.
How do i apply the shader to all objects in the scene?

dunno if it's still relevant but the only easy way to "apply" a shader to every object in the scene is Camera.RenderWithShader:
https://docs.unity3d.com/ScriptReference/Camera.RenderWithShader.html

Related

I can't implement the so-called pixel lighting in the 2d game on unity

I am creating a 2D unity game and my idea was to make pixel lighting. that is, as you approach the lantern, the objects will change color (according to the color palette).
the effect I want to get is shown below (this is a drawing so far).
I have no idea how such "lighting" can be implemented, at first I thought to do it using shader graph, but out of my stupidity I could not do it. (I tried many more options, but I never managed to implement my plans) for this, I ask for help from those who know.
I haven't worked with 2d lighting, so I'm not certain, but I have an old post processing shader that can pixelize and use a limited color pallet for the default render pipeline that might work for you.
Shader:
Shader "Custom/PixelPallet"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_ColorTheme("Theme", 2D) = "white" {}
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 100
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
sampler2D _MainTex;
sampler2D _ColorTheme;
float4 _MainTex_ST;
int _PixelDensity;
float2 _ScreenAspectRatioMultiplier;
float2 _SpriteAspectRatioMultiplier;
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
return o;
}
fixed4 frag (v2f i) : SV_Target
{
float2 pixelScaling = _PixelDensity * _ScreenAspectRatioMultiplier;
float2 pixelCoord = round(i.uv * pixelScaling)/ pixelScaling;
fixed4 value = tex2D(_MainTex, pixelCoord);
float2 coord = float2(value.r,value.g);
return tex2D(_ColorTheme, coord);
}
ENDCG
}
}
}
Post Process Script:
using UnityEngine;
public class PostEfect : MonoBehaviour
{
private Material mat;
public Texture texture;
public int pixelDensity = 80;
void Start()
{
mat = new Material(Shader.Find("Custom/PixelPallet"));
mat.SetTexture("_ColorTheme", texture);
}
void OnRenderImage(RenderTexture src, RenderTexture dest)
{
Vector2 aspectRatioData;
if (Screen.height > Screen.width)
aspectRatioData = new Vector2((float)Screen.width / Screen.height, 1);
else
aspectRatioData = new Vector2(1, (float)Screen.height / Screen.width);
mat.SetVector("_ScreenAspectRatioMultiplier", aspectRatioData);
mat.SetInt("_PixelDensity", pixelDensity);
// Read pixels from the source RenderTexture, apply the material, copy the updated results to the destination RenderTexture
Graphics.Blit(src, dest, mat);
}
}
Just drop the script on the camera and select a color pallet sprite with point filter. Adjust the pixel Density according to your needs.

Space curvature bending in Unity3D as Post Effect/Image Effect

I'm trying to archieve a chilindrical effect like this on Unity3D:
But every solution is using material based shader, sadly I must have a Post Process effect or an Image Effect, for these reasons:
One map out of 30 needs to use this effect and there are many materials that are shared between them...
Every solution is vertex based. I've done some experiments but I have models with different polygon count, this means that the effect would create visual artifacts (but this can by fixed by editing the 3d models eventually).
I'm at an advanced stage of development.
Do you think it's possible to create a simple effect (even a fake one) that moves the pixels downwards/upwards based on the distance to the camera? (I assume I need to use the depth map)
I've tried very hard but I had no success, the effect doesn't do anything or just won't compile :(
This is the best I could come up with, the grayscale in the frag method is only to check if the shader is working, but once I define the Vert function the grayscale disappears and the shader does nothing.
Shader "Hidden/Custom/WorldCurvature"
{
HLSLINCLUDE
#include "Packages/com.unity.postprocessing/PostProcessing/Shaders/StdLib.hlsl"
TEXTURE2D_SAMPLER2D(_MainTex, sampler_MainTex);
float _Bend;
struct Attributes
{
float4 vertex : POSITION;
float2 texcoord : TEXCOORD0;
};
struct Varyings
{
float4 vertex : SV_POSITION;
float2 texcoord : TEXCOORD0;
};
float4 Frag(Varyings i) : SV_Target
{
float4 color = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.texcoord);
float luminance = dot(color.rgb, float3(0.2126729, 0.7151522, 0.0721750));
color.rgb = lerp(color.rgb, luminance.xxx, _Bend.xxx);
return color;
}
Varyings Vert(Attributes v)
{
Varyings o;
float4 vv = mul(unity_ObjectToWorld, v.vertex );
vv.xyz -= _WorldSpaceCameraPos.xyz;
vv = float4( 0.0f, (vv.x * vv.x) * - _Bend, 0.0f, 0.0f );
v.vertex += mul(unity_WorldToCamera, vv);
o.vertex = mul(unity_WorldToCamera, vv);
o.texcoord = v.texcoord;
return o;
}
ENDHLSL
SubShader
{
Cull Off ZWrite Off ZTest Always
Pass
{
HLSLPROGRAM
#pragma vertex Vert
#pragma fragment Frag
ENDHLSL
}
}
}
I've done another experiment but I think it would only work in a 2D environment, here the image stops once I activate the image effect:
Shader "Hidden/Custom/CylinderImageEffect" {
Properties {
_MainTex ("Texture", 2D) = "white" {}
}
SubShader {
Cull Off ZWrite Off ZTest Always
Pass {
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct v2f {
float4 vertex : SV_POSITION;
float2 uv : TEXCOORD0;
};
v2f vert( appdata_img v )
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.texcoord.xy;
return o;
}
sampler2D _MainTex;
fixed4 frag (v2f i) : SV_Target {
i.uv.x = 1 - acos(i.uv.x * 2 - 1) / 3.14159265;
return tex2D(_MainTex, i.uv);
}
ENDCG
}
}
}

Simulating depth camera in Unity

I am trying to simulate a depth camera in unity. I am using a shader to get the depth textures and I am able to make the camera show this depth image.
The problem now is that the scale extends beyond the closest and furthest objects, going from the near clipping pane to the far clipping plane. I want this to be in grayscale so that the most far object has a color value of 0 and the closest object has a color value of 1, regardless of where those objects are between the clipping planes.
This is a screenshot of what I get:
I am using a shader to achieve this. This is the code of the shader:
Shader "Depth/DepthPostProcessShader"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
}
SubShader
{
// No culling or depth
Cull Off ZWrite Off ZTest Always
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;
return o;
}
sampler2D _MainTex;
sampler2D _CameraDepthTexture;
fixed4 frag (v2f i) : SV_Target
{
//get depth from depth texture
fixed4 depthCol = tex2D(_CameraDepthTexture, i.uv);
return depthCol;
}
ENDCG
}
}
}
I am attaching a script to the main camera. This is the code:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class DepthPostProcessing : MonoBehaviour
{
//material that's applied when doing postprocessing
[SerializeField]
private Material postprocessMaterial;
private void Start(){
//get the camera and tell it to render a depth texture
Camera cam = GetComponent<Camera>();
cam.depthTextureMode = DepthTextureMode.Depth;
}
//method which is automatically called by unity after the camera is done rendering
private void OnRenderImage(RenderTexture source, RenderTexture destination){
//draws the pixels from the source texture to the destination texture
Graphics.Blit(source, destination, postprocessMaterial);
}
}
My idea is that I take the distance of the closest and farthest object to make a better scale. I just can't seem to find how to be able to get these properties out of sampler2D _CameraDepthTexture

Unity transparent shadow receiving plane for AR app to receive only shadows and not light reflections

I'm working on a transparent shadow receiving shader for Augmented Reality app in Unity, that is put into a plane which will receive shadows from point light or spotlight. It works fine, but the plane receives light also as you can see in the image.
Output Screenshots
I'm using unity 5.6.3p4. Please tell me what could be the problem in this script or what should I change in this code to receive only shadows and not light reflections on the plane?
The Shader Script is below:
Shader "SP/InvisibleShadowCasterForPLight" {
Properties {
_MainTex("Base (RGB)", 2D) = "white" {}
}
SubShader {
Pass {
Blend One One
Tags { "LightMode" = "ForwardAdd" }
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
#pragma multi_compile_fwdadd_fullshadows
#include "AutoLight.cginc"
sampler2D _MainTex;
float4 _MainTex_ST;
struct v2f {
float4 pos : SV_POSITION;
LIGHTING_COORDS(0,1)
float2 uv : TEXCOORD2;
};
v2f vert(appdata_base v) {
v2f o;
o.pos = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX (v.texcoord, _MainTex);
TRANSFER_VERTEX_TO_FRAGMENT(o);
return o;
}
fixed4 frag(v2f i) : COLOR
{
float attenuation = LIGHT_ATTENUATION(i);
return tex2D (_MainTex, i.uv) * attenuation;
}
ENDCG
}
}
Fallback "VertexLit"
}
Replace:
LIGHTING_COORDS(0,1)
with:
UNITY_SHADOW_COORDS(0)
Then replace:
TRANSFER_VERTEX_TO_FRAGMENT(0)
with:
TRANSFER_SHADOW(o);
Finally, in your fragment function, use this:
half shadow = SHADOW_ATTENUATION(i);
return tex2D (_MainTex, i.uv) * shadow;
That should do it!
Oh, and you may or may not also want to change blendmode to multiplicative... If the above doesn't work, try changing:
Blend One One
with:
Blend DstColor Zero

How to draw outside sprites boundaries with Unity's shaders?

I'm learning how to make shader in Unity and I have a question about shaders applied on sprites.
I made a shader which modifies a sprite to create a distortion effect:
But, as you can see on the picture linked above, my sprite is cut on the boundaries of the sprite.
Is there any way to avoid this cut and draw the missing part ?
My shader :
Shader "Shader101/testShader"
{
Properties
{
_MainTex( "Texture", 2D ) = "white" {}
_DisplaceTex( "Displacement Texture", 2D ) = "white" {}
_Power("Power", Range(-0.4, 0.4)) = 0
}
SubShader
{
Tags
{
"Queue" = "Transparent"
}
Pass
{
Blend SrcAlpha OneMinusSrcAlpha
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float4 vertex : SV_POSITION;
float2 uv : TEXCOORD0;
};
v2f vert (appdata v)
{
v2f o;
o.vertex = mul( UNITY_MATRIX_MVP, v.vertex);
o.uv = v.uv;
return o;
}
sampler2D _MainTex;
sampler2D _DisplaceTex;
float _Power;
fixed4 frag (v2f i) : SV_Target
{
float2 distuv = float2( 0, i.uv.y + _Time.x * 2 );
float2 wave = tex2D( _DisplaceTex, distuv * 5 ).xy;
wave = (( wave * 2 ) - 1) * _Power - 0.25;
float4 color = tex2D( _MainTex, i.uv + float2(wave.y, 0) );
return color;
}
ENDCG
}
}
}
Sorry for my poor english :|
Thank you
Take a look at your scene view with shaded wireframe mode on.
Here you should see that your shader does not change the vertex position, but only changes the rendered pixel colors in the fragment shader. Therefore it can not draw outside of the mesh.
The easiest way to fix it would be to modify the sprite texture with more transparent areas around your cat. (In Photoshop > Image > Canvas Size ...)
This is still a high search result without a satisfactory answer.
Two settings may help:
Extrude edges - Adds padding around the sprite
Full Rect - Required if you manually add extra space around a sprite, causes the wireframe to use all the empty space, instead of just the opaque areas.