Simulating a depth camera in Unity

I am trying to simulate a depth camera in Unity. I am using a shader to get the depth texture, and I am able to make the camera show this depth image.
The problem now is that the scale extends beyond the closest and farthest objects, going from the near clipping plane to the far clipping plane. I want this to be in grayscale so that the farthest object has a color value of 0 and the closest object has a color value of 1, regardless of where those objects sit between the clipping planes.
I am using a shader to achieve this. This is the code of the shader:
Shader "Depth/DepthPostProcessShader"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
}
SubShader
{
// No culling or depth
Cull Off ZWrite Off ZTest Always
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;
return o;
}
sampler2D _MainTex;
sampler2D _CameraDepthTexture;
fixed4 frag (v2f i) : SV_Target
{
//get depth from depth texture
fixed4 depthCol = tex2D(_CameraDepthTexture, i.uv);
return depthCol;
}
ENDCG
}
}
}
I am attaching a script to the main camera. This is the code:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class DepthPostProcessing : MonoBehaviour
{
    // material that's applied when doing post-processing
    [SerializeField]
    private Material postprocessMaterial;

    private void Start()
    {
        // get the camera and tell it to render a depth texture
        Camera cam = GetComponent<Camera>();
        cam.depthTextureMode = DepthTextureMode.Depth;
    }

    // called automatically by Unity after the camera finishes rendering
    private void OnRenderImage(RenderTexture source, RenderTexture destination)
    {
        // draws the pixels from the source texture to the destination texture
        Graphics.Blit(source, destination, postprocessMaterial);
    }
}
My idea is to take the distances of the closest and farthest objects to build a better scale. I just can't figure out how to get those values out of the sampler2D _CameraDepthTexture.
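For reference, the raw values in _CameraDepthTexture are non-linear and cover the whole near-to-far clip range, which is why the image is dominated by the clipping planes rather than the objects. Below is a minimal sketch of a fragment shader that first linearizes the depth with Linear01Depth (a standard UnityCG.cginc helper) and then remaps it between two distances supplied from C#; the _MinDepth and _MaxDepth properties are hypothetical and would hold the 0-1 linear depths of the closest and farthest objects:

// hypothetical uniforms, set from C# with postprocessMaterial.SetFloat
float _MinDepth; // linear 0-1 depth of the closest object
float _MaxDepth; // linear 0-1 depth of the farthest object

fixed4 frag (v2f i) : SV_Target
{
    // raw depth is non-linear; Linear01Depth maps it to
    // 0 at the near plane and 1 at the far plane
    float raw = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, i.uv);
    float depth01 = Linear01Depth(raw);
    // remap so the closest object is 1 (white) and the farthest is 0 (black)
    float remapped = saturate((_MaxDepth - depth01) / (_MaxDepth - _MinDepth));
    return fixed4(remapped, remapped, remapped, 1);
}

On the C# side you could estimate the two distances from the renderer bounds of the objects in view (divided by the camera's farClipPlane to get 0-1 values) and pass them in with postprocessMaterial.SetFloat before the Blit.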

Related

Object is only visible to the left eye

I have a Unity project using MRTK.
After I replaced the MRTK/Standard shader with the following one, the object only renders to the left eye.
How can I render the object to both eyes?
Shader "HoloUS/BrightnessContrast"
{
Properties
{
_MainTex("Texture", 2D) = "white" {}
_Width("Width", Float) = 1
_Center("Center", Float) = 0.5
}
SubShader
{
Tags { "RenderType" = "Opaque" }
LOD 100
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct VInp
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct VOut
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
sampler2D _MainTex;
float4 _MainTex_ST;
float _Center;
float _Width;
VOut vert(VInp inp)
{
VOut o;
o.vertex = UnityObjectToClipPos(inp.vertex);
o.uv = TRANSFORM_TEX(inp.uv, _MainTex);
return o;
}
fixed4 frag(VOut inp) : SV_Target
{
float min = _Center - _Width / 2;
return tex2D(_MainTex, inp.uv) * _Width + min;
}
ENDCG
}
}
}
For shaders used with HoloLens and MRTK2, there is an upgrade path that may need to be clicked through:
https://learn.microsoft.com/en-us/windows/mixed-reality/mrtk-unity/mrtk2/features/rendering/mrtk-standard-shader?q=shader&view=mrtkunity-2022-05
Unity also documents the shader script requirements for stereo rendering on HoloLens:
https://docs.unity3d.com/Manual/SinglePassStereoRenderingHoloLens.html
I could not tell the specific Unity and MRTK versions used in the sample above; with further details, the community can likely assist further.
If you are using single-pass VR, a few changes to the shader declarations are needed so the shader recognizes the stereo texture input in single pass:
//add this in struct appdata
UNITY_VERTEX_INPUT_INSTANCE_ID
//add this in struct v2f
UNITY_VERTEX_OUTPUT_STEREO
//replace sampler2D
UNITY_DECLARE_SCREENSPACE_TEXTURE(_MainTex);
//add these inside v2f vert()
UNITY_SETUP_INSTANCE_ID(v);
UNITY_INITIALIZE_OUTPUT(v2f, o);
UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
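Applied to the shader above, a minimal sketch of how the declarations and functions could look (assuming Single Pass Instanced; the two extra fragment-side macros, UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX and UNITY_SAMPLE_SCREENSPACE_TEXTURE, are standard UnityCG/HLSLSupport macros that select and sample the correct eye's texture slice):

struct VInp
{
    float4 vertex : POSITION;
    float2 uv : TEXCOORD0;
    UNITY_VERTEX_INPUT_INSTANCE_ID
};

struct VOut
{
    float2 uv : TEXCOORD0;
    float4 vertex : SV_POSITION;
    UNITY_VERTEX_OUTPUT_STEREO
};

UNITY_DECLARE_SCREENSPACE_TEXTURE(_MainTex);
float4 _MainTex_ST;
float _Center;
float _Width;

VOut vert(VInp inp)
{
    VOut o;
    UNITY_SETUP_INSTANCE_ID(inp);
    UNITY_INITIALIZE_OUTPUT(VOut, o);
    UNITY_INITIALIZE_VERTEX_OUTPUT_STEREO(o);
    o.vertex = UnityObjectToClipPos(inp.vertex);
    o.uv = TRANSFORM_TEX(inp.uv, _MainTex);
    return o;
}

fixed4 frag(VOut inp) : SV_Target
{
    // select the correct eye before sampling the stereo texture
    UNITY_SETUP_STEREO_EYE_INDEX_POST_VERTEX(inp);
    float min = _Center - _Width / 2;
    return UNITY_SAMPLE_SCREENSPACE_TEXTURE(_MainTex, inp.uv) * _Width + min;
}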
Additional link: Single Pass Instanced rendering: https://docs.unity3d.com/Manual/SinglePassInstancing.html

I can't implement so-called pixel lighting in a 2D game in Unity

I am creating a 2D Unity game and my idea was to add pixel lighting: as you approach the lantern, objects change color (according to the color palette).
So far I only have a drawing of the effect I want.
I have no idea how such "lighting" can be implemented. At first I thought of doing it with Shader Graph, but I couldn't get it to work (I tried many other options too, and never managed to implement my plan), so I'm asking for help from those who know.
I haven't worked with 2D lighting, so I'm not certain, but I have an old post-processing shader for the built-in render pipeline that pixelizes the image and maps it to a limited color palette; it might work for you.
Shader:
Shader "Custom/PixelPallet"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_ColorTheme("Theme", 2D) = "white" {}
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 100
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
sampler2D _MainTex;
sampler2D _ColorTheme;
float4 _MainTex_ST;
int _PixelDensity;
float2 _ScreenAspectRatioMultiplier;
float2 _SpriteAspectRatioMultiplier;
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
return o;
}
fixed4 frag (v2f i) : SV_Target
{
float2 pixelScaling = _PixelDensity * _ScreenAspectRatioMultiplier;
float2 pixelCoord = round(i.uv * pixelScaling)/ pixelScaling;
fixed4 value = tex2D(_MainTex, pixelCoord);
float2 coord = float2(value.r,value.g);
return tex2D(_ColorTheme, coord);
}
ENDCG
}
}
}
Post Process Script:
using UnityEngine;

public class PostEffect : MonoBehaviour
{
    private Material mat;
    public Texture texture;
    public int pixelDensity = 80;

    void Start()
    {
        mat = new Material(Shader.Find("Custom/PixelPallet"));
        mat.SetTexture("_ColorTheme", texture);
    }

    void OnRenderImage(RenderTexture src, RenderTexture dest)
    {
        // keep the pixel grid square regardless of screen orientation
        Vector2 aspectRatioData;
        if (Screen.height > Screen.width)
            aspectRatioData = new Vector2((float)Screen.width / Screen.height, 1);
        else
            aspectRatioData = new Vector2(1, (float)Screen.height / Screen.width);
        mat.SetVector("_ScreenAspectRatioMultiplier", aspectRatioData);
        mat.SetInt("_PixelDensity", pixelDensity);
        // read pixels from the source RenderTexture, apply the material,
        // and copy the result to the destination RenderTexture
        Graphics.Blit(src, dest, mat);
    }
}
Just drop the script on the camera and select a color palette sprite with point filtering. Adjust the pixel density to your needs.

Unity 2D: Area Color Inverse Effect

I am trying to create a GameObject that, while active, inverts the colors of anything behind it. Ideally, this GameObject expands from its centre point to a certain radius when it appears/becomes active.
So imagine a circle whose scale increases gradually from x=0, y=0 to x=5, y=5 over 1 or 2 seconds and inverts the colors of anything behind it. It doesn't exactly matter how long it takes or how big the circle is; that's just an example of the idea. I tried creating a GameObject with a mask and a shader I found online, but it didn't work at all, and I still wasn't sure how to animate the expansion.
Does anyone have any idea how I could make this dream effect of mine a reality?
Here is a shader that does nothing except invert the color.
The important part is Blend OneMinusDstColor Zero: the blended output is src * (1 - dst), so with a white source color the result is exactly the inverse of whatever is behind the object.
Shader "Custom/Invert"
{
Properties
{
_Color ("Color", Color) = (1,1,1,1)
_MainTex ("Albedo (RGB)", 2D) = "white" {}
}
SubShader
{
Tags { "RenderType"="Transparent" "Queue"="Transparent" }
LOD 200
Blend OneMinusDstColor Zero
PASS
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma target 3.0 Alpha:Blend
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
fixed4 _Color;
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = float2(0,0);
return o;
}
fixed4 frag (v2f i) : SV_Target
{
return _Color;
}
ENDCG
}
}
FallBack "Diffuse"
}
You can put this on any sprite or 3D object to get the inversion effect.
As for the animation, King's answer is nice and simple. Alternatively, you could animate the scale in code:
using UnityEngine;

public class Scale : MonoBehaviour
{
    void Update()
    {
        var scale = Mathf.Sin(Time.timeSinceLevelLoad / 4f) * 20;
        this.transform.localScale = new Vector3(scale, scale, 1);
    }
}
This particular code will make the sprite "pulse".
I think you should be able to make your own scaling script from this example.
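If you want the one-shot expansion described in the question (the scale growing from 0 to 5 over a second or two) rather than a pulse, here is a minimal sketch of a coroutine-based version; the targetScale and duration values are just the ones mentioned in the question:

using System.Collections;
using UnityEngine;

public class ExpandOnEnable : MonoBehaviour
{
    public float targetScale = 5f; // final x/y scale
    public float duration = 2f;    // seconds until the target scale is reached

    void OnEnable()
    {
        transform.localScale = new Vector3(0f, 0f, 1f);
        StartCoroutine(Expand());
    }

    IEnumerator Expand()
    {
        float t = 0f;
        while (t < duration)
        {
            t += Time.deltaTime;
            // interpolate the scale linearly from 0 to targetScale
            float s = Mathf.Lerp(0f, targetScale, t / duration);
            transform.localScale = new Vector3(s, s, 1f);
            yield return null;
        }
    }
}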
Select your GameObject, then Window > Animation > Animation > Create > Save > Add Property > Transform > Scale. At the 0:00 keyframe set the x, y, z scale to 1. Move to the 1:00 keyframe and set the x, y, z scale to 2. Press Play and the animation will loop from a scale of 1 to 2.

Portal shader in Unity

I'm trying to implement portals in Unity. The way I'm doing it is having a camera mimic the player's position and rotation in the other portal's local space.
My problem is that whatever is behind the portal also gets rendered by this camera, so if I move back from the portal, other geometry clips into view. I've made a shader that's supposed to add a clipping plane, but it seems to clip either everything or nothing:
Shader "Hidden/ClipPlaneShader"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_PlaneNormal("PlaneNormal",Vector) = (0,1,0,0)
_PlanePosition("PlanePosition",Vector) = (0,0.5,0,0)
}
SubShader
{
// No culling or depth
Cull Off ZWrite Off ZTest Always
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
float clip : SV_ClipDistance0;
};
float4 _PlaneNormal;
float4 _PlanePosition;
v2f vert (appdata v)
{
float4 worldPos = mul(unity_ObjectToWorld, v.vertex);
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;
o.clip = dot(worldPos - _PlanePosition, _PlaneNormal);
return o;
}
sampler2D _MainTex;
fixed4 frag (v2f i) : SV_Target
{
fixed4 col = tex2D(_MainTex, i.uv);
return col;
}
ENDCG
}
}
}
I'm adding it to the camera via this script, but I don't know if that's the best way to do it:
private void OnRenderImage(RenderTexture src, RenderTexture dest)
{
    _material.SetVector("_PlanePosition", planePosition);
    _material.SetVector("_PlaneNormal", planeNormal);
    Graphics.Blit(src, dest, _material);
}
Can anyone see what I'm doing wrong?
EDIT:
The Portal guys called this problem 'banana juice' and they explain it in this video: https://youtu.be/ivyseNMVt-4?t=1064
I've tried drawing a nice picture here: https://imgur.com/9Dc57Pm
The eye on the left is the player, and the eye on the right is my portal camera. The portal camera should only render what's beneath the yellow line (the clip plane), but it also renders the box, so it ends up looking weird for the player.
EDIT 2: I think I've found a problem with using OnRenderImage() to apply my material: at that point I'm working on a 2D texture of the already-rendered image, so the vertex-stage clip distance has no scene geometry left to clip.
How do I apply the shader to all objects in the scene?
I don't know if it's still relevant, but the only easy way to "apply" a shader to every object in the scene is Camera.RenderWithShader:
https://docs.unity3d.com/ScriptReference/Camera.RenderWithShader.html
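A minimal sketch of how that could look for the portal camera, assuming it is a disabled camera rendering into a RenderTexture that you drive manually each frame (the class and field names here are illustrative, and the clip-plane shader would need to be written as a per-object shader rather than the blit version above):

using UnityEngine;

public class PortalClipRenderer : MonoBehaviour
{
    public Camera portalCamera;   // disabled camera with a targetTexture assigned
    public Shader clipShader;     // the clip-plane shader, used as a replacement shader
    public Transform portalPlane; // transform of the portal surface

    void LateUpdate()
    {
        // global shader values are visible to every object drawn
        // with the replacement shader
        Shader.SetGlobalVector("_PlanePosition", portalPlane.position);
        Shader.SetGlobalVector("_PlaneNormal", portalPlane.forward);
        // render the whole scene with clipShader; "" means no tag filtering
        portalCamera.RenderWithShader(clipShader, "");
    }
}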

How to draw outside a sprite's boundaries with Unity shaders?

I'm learning how to write shaders in Unity and I have a question about shaders applied to sprites.
I made a shader which modifies a sprite to create a distortion effect, but the sprite gets cut off at the boundaries of its mesh.
Is there any way to avoid this cut and draw the missing part?
My shader:
Shader "Shader101/testShader"
{
Properties
{
_MainTex( "Texture", 2D ) = "white" {}
_DisplaceTex( "Displacement Texture", 2D ) = "white" {}
_Power("Power", Range(-0.4, 0.4)) = 0
}
SubShader
{
Tags
{
"Queue" = "Transparent"
}
Pass
{
Blend SrcAlpha OneMinusSrcAlpha
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float4 vertex : SV_POSITION;
float2 uv : TEXCOORD0;
};
v2f vert (appdata v)
{
v2f o;
o.vertex = mul( UNITY_MATRIX_MVP, v.vertex);
o.uv = v.uv;
return o;
}
sampler2D _MainTex;
sampler2D _DisplaceTex;
float _Power;
fixed4 frag (v2f i) : SV_Target
{
float2 distuv = float2( 0, i.uv.y + _Time.x * 2 );
float2 wave = tex2D( _DisplaceTex, distuv * 5 ).xy;
wave = (( wave * 2 ) - 1) * _Power - 0.25;
float4 color = tex2D( _MainTex, i.uv + float2(wave.y, 0) );
return color;
}
ENDCG
}
}
}
Sorry for my poor English :|
Thank you
Take a look at your Scene view with shaded wireframe mode on.
There you should see that your shader does not change the vertex positions; it only changes the rendered pixel colors in the fragment shader. Therefore it cannot draw outside of the mesh.
The easiest way to fix this would be to modify the sprite texture to include more transparent area around your cat (in Photoshop: Image > Canvas Size...).
This is still a high search result without a satisfactory answer.
Two sprite import settings may help:
Extrude Edges - adds padding around the sprite.
Full Rect - required if you manually add extra space around a sprite; it makes the sprite's mesh cover the full rectangle instead of just the opaque areas.