In Unity I'm planning on using a geometry shader for processing points into quads and can't quite figure out why I'm not getting output from my geometry shader. I've edited it down to a minimum working example, as seen here:
Shader "foo/bar"
{
SubShader{
Cull Off
Lighting Off
ZWrite Off
Pass{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#pragma geometry geom
struct appdata {
float4 vertex : POSITION;
};
struct v2g {
float4 vertex : POSITION;
};
struct g2f {
float4 vertex : POSITION;
};
v2g vert(appdata v) {
v2g o = (v2g)0;
o.vertex = v.vertex;
return o;
}
[maxvertexcount(4)]
void geom(point v2g p[1], inout TriangleStream<g2f> tristream)
{
g2f o = (g2f)0;
o.vertex = float4(0.1, 0.1, 0, 0);
tristream.Append(o);
o.vertex = float4(0.1, 0.9, 0, 0);
tristream.Append(o);
o.vertex = float4(0.9, 0.9, 0, 0);
tristream.Append(o);
}
fixed4 frag(g2f i) : COLOR
{
return fixed4(1,1,1,1);
}
ENDCG
}
}
}
I'd expect that to draw a white triangle covering just under half the texture I'm rendering to. Instead it displays all black, just as it was before the shader.
So far I've:
Removed every possible source of culling I can think of
Made absolute sure my setup works when rendering a mesh in a similar fashion
Checked that this i getting input and running
and scoured the very limited amount of documentation available
I'm all outta ideas. If anyone has even a minor clue as to what I'm doing wrong please let me know. Thanks.
-Fraser
Related
I am creating a 2D unity game and my idea was to make pixel lighting. that is, as you approach the lantern, the objects will change color (according to the color palette).
the effect I want to get is shown below (this is a drawing so far).
I have no idea how such "lighting" can be implemented, at first I thought to do it using shader graph, but out of my stupidity I could not do it. (I tried many more options, but I never managed to implement my plans) for this, I ask for help from those who know.
I haven't worked with 2d lighting, so I'm not certain, but I have an old post processing shader that can pixelize and use a limited color pallet for the default render pipeline that might work for you.
Shader:
Shader "Custom/PixelPallet"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_ColorTheme("Theme", 2D) = "white" {}
}
SubShader
{
Tags { "RenderType"="Opaque" }
LOD 100
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
};
sampler2D _MainTex;
sampler2D _ColorTheme;
float4 _MainTex_ST;
int _PixelDensity;
float2 _ScreenAspectRatioMultiplier;
float2 _SpriteAspectRatioMultiplier;
v2f vert (appdata v)
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = TRANSFORM_TEX(v.uv, _MainTex);
return o;
}
fixed4 frag (v2f i) : SV_Target
{
float2 pixelScaling = _PixelDensity * _ScreenAspectRatioMultiplier;
float2 pixelCoord = round(i.uv * pixelScaling)/ pixelScaling;
fixed4 value = tex2D(_MainTex, pixelCoord);
float2 coord = float2(value.r,value.g);
return tex2D(_ColorTheme, coord);
}
ENDCG
}
}
}
Post Process Script:
using UnityEngine;
public class PostEfect : MonoBehaviour
{
private Material mat;
public Texture texture;
public int pixelDensity = 80;
void Start()
{
mat = new Material(Shader.Find("Custom/PixelPallet"));
mat.SetTexture("_ColorTheme", texture);
}
void OnRenderImage(RenderTexture src, RenderTexture dest)
{
Vector2 aspectRatioData;
if (Screen.height > Screen.width)
aspectRatioData = new Vector2((float)Screen.width / Screen.height, 1);
else
aspectRatioData = new Vector2(1, (float)Screen.height / Screen.width);
mat.SetVector("_ScreenAspectRatioMultiplier", aspectRatioData);
mat.SetInt("_PixelDensity", pixelDensity);
// Read pixels from the source RenderTexture, apply the material, copy the updated results to the destination RenderTexture
Graphics.Blit(src, dest, mat);
}
}
Just drop the script on the camera and select a color pallet sprite with point filter. Adjust the pixel Density according to your needs.
I'm trying to archieve a chilindrical effect like this on Unity3D:
But every solution is using material based shader, sadly I must have a Post Process effect or an Image Effect, for these reasons:
One map out of 30 needs to use this effect and there are many materials that are shared between them...
Every solution is vertex based. I've done some experiments but I have models with different polygon count, this means that the effect would create visual artifacts (but this can by fixed by editing the 3d models eventually).
I'm at an advanced stage of development.
Do you think it's possible to create a simple effect (even a fake one) that moves the pixels downwards/upwards based on the distance to the camera? (I assume I need to use the depth map)
I've tried very hard but I had no success, the effect doesn't do anything or just won't compile :(
This is the best I could come up with, the grayscale in the frag method is only to check if the shader is working, but once I define the Vert function the grayscale disappears and the shader does nothing.
Shader "Hidden/Custom/WorldCurvature"
{
HLSLINCLUDE
#include "Packages/com.unity.postprocessing/PostProcessing/Shaders/StdLib.hlsl"
TEXTURE2D_SAMPLER2D(_MainTex, sampler_MainTex);
float _Bend;
struct Attributes
{
float4 vertex : POSITION;
float2 texcoord : TEXCOORD0;
};
struct Varyings
{
float4 vertex : SV_POSITION;
float2 texcoord : TEXCOORD0;
};
float4 Frag(Varyings i) : SV_Target
{
float4 color = SAMPLE_TEXTURE2D(_MainTex, sampler_MainTex, i.texcoord);
float luminance = dot(color.rgb, float3(0.2126729, 0.7151522, 0.0721750));
color.rgb = lerp(color.rgb, luminance.xxx, _Bend.xxx);
return color;
}
Varyings Vert(Attributes v)
{
Varyings o;
float4 vv = mul(unity_ObjectToWorld, v.vertex );
vv.xyz -= _WorldSpaceCameraPos.xyz;
vv = float4( 0.0f, (vv.x * vv.x) * - _Bend, 0.0f, 0.0f );
v.vertex += mul(unity_WorldToCamera, vv);
o.vertex = mul(unity_WorldToCamera, vv);
o.texcoord = v.texcoord;
return o;
}
ENDHLSL
SubShader
{
Cull Off ZWrite Off ZTest Always
Pass
{
HLSLPROGRAM
#pragma vertex Vert
#pragma fragment Frag
ENDHLSL
}
}
}
I've done another experiment but I think it would only work in a 2D environment, here the image stops once I activate the image effect:
Shader "Hidden/Custom/CylinderImageEffect" {
Properties {
_MainTex ("Texture", 2D) = "white" {}
}
SubShader {
Cull Off ZWrite Off ZTest Always
Pass {
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct v2f {
float4 vertex : SV_POSITION;
float2 uv : TEXCOORD0;
};
v2f vert( appdata_img v )
{
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.texcoord.xy;
return o;
}
sampler2D _MainTex;
fixed4 frag (v2f i) : SV_Target {
i.uv.x = 1 - acos(i.uv.x * 2 - 1) / 3.14159265;
return tex2D(_MainTex, i.uv);
}
ENDCG
}
}
}
I'm trying to make a sobel edge detection shader in Unity, but I have no shader knowledge.
For the first part, I need to get the camera depth information which I did.
After trying to develop the shader further (implementing the algorithm) I got this error:
Shader warning in 'Custom/SobelEdgeDetectionShader': Custom/SobelEdgeDetectionShader shader is not supported on this GPU (none of subshaders/fallbacks are suitable)
After trying to find a solution to the problem (which was a failure) I decided to delete the lines I wrote and revert the code back to the point where the shader worked, but the error persisted.
Restarting Unity didn't work either.
Here is the shader:
Shader "Custom/SobelEdgeDetectionShader"
{
Properties
{
_DepthLevel("Depth Level", Range(1, 3)) = 2
}
SubShader
{
UsePass "Legacy Shaders/VertexLit/SHADOWCASTER"
Pass
{
CGPROGRAM
#pragma exclude_renderers d3d11
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
uniform sampler2D_float _CameraDepthTexture;
uniform fixed _DepthLevel;
struct uinput
{
float4 pos : POSITION;
half2 uv : TEXCOORD0;
};
struct uoutput
{
float4 pos : SV_POSITION;
half2 uv : TEXCOORD0;
};
uoutput vert(uinput i)
{
uoutput o;
o.pos = UnityObjectToClipPos(i.pos);
o.uv = MultiplyUV(UNITY_MATRIX_TEXTURE0, i.uv);
return o;
}
fixed4 frag(uoutput o) : COLOR
{
float depth = SAMPLE_DEPTH_TEXTURE(_CameraDepthTexture, o.uv);
float linearDepth = Linear01Depth(depth);
float4 depthInfo;
depthInfo = fixed4(linearDepth, linearDepth, linearDepth, 1.0) * 10 * _DepthLevel;
return depthInfo;
}
ENDCG
}
}
}
If I missed any important information, please let me know.
I'm trying to implement portals in Unity. The way I'm doing it is having a camera mimic the player's position and rotation in the other portals local space.
My problem is, that whatever is behind the portal gets rendered on this camera, so if I move back from the portal, some other stuff will clip in. I've made a shader that's supposed to add a clipping plane, but it seems it's either clipping everything or nothing:
Shader "Hidden/ClipPlaneShader"
{
Properties
{
_MainTex ("Texture", 2D) = "white" {}
_PlaneNormal("PlaneNormal",Vector) = (0,1,0,0)
_PlanePosition("PlanePosition",Vector) = (0,0.5,0,0)
}
SubShader
{
// No culling or depth
Cull Off ZWrite Off ZTest Always
Pass
{
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float2 uv : TEXCOORD0;
float4 vertex : SV_POSITION;
float clip : SV_ClipDistance0;
};
float4 _PlaneNormal;
float4 _PlanePosition;
v2f vert (appdata v)
{
float4 worldPos = mul(unity_ObjectToWorld, v.vertex);
v2f o;
o.vertex = UnityObjectToClipPos(v.vertex);
o.uv = v.uv;
o.clip = dot(worldPos - _PlanePosition, _PlaneNormal);
return o;
}
sampler2D _MainTex;
fixed4 frag (v2f i) : SV_Target
{
fixed4 col = tex2D(_MainTex, i.uv);
return col;
}
ENDCG
}
}
}
I'm adding it to the camera via this script, but I don't know if that's the best way to do it:
private void OnRenderImage(RenderTexture src, RenderTexture dest)
{
_material.SetVector("_PlanePosition", planePosition);
_material.SetVector("_PlaneNormal", planeNormal);
Graphics.Blit(src, dest, _material);
}
Can anyone see what I'm doing wrong?
EDIT:
The Portal guys called this problem 'banana juice' and they explain it in this video: https://youtu.be/ivyseNMVt-4?t=1064
I've tried drawning a nice picture here: https://imgur.com/9Dc57Pm
The eye on the left is the player, and the eye on the right is my portal camera. The portal camera should only render what's beneath the yellow line (the clip plane), but it also renders the box, so it ends up looking weird for the player.
EDIT^2: i think I've found a problem in using the OnRenderImage() function to apply my material. That's a 2D texture.
How do i apply the shader to all objects in the scene?
dunno if it's still relevant but the only easy way to "apply" a shader to every object in the scene is Camera.RenderWithShader:
https://docs.unity3d.com/ScriptReference/Camera.RenderWithShader.html
I'm learning how to make shader in Unity and I have a question about shaders applied on sprites.
I made a shader which modifies a sprite to create a distortion effect:
But, as you can see on the picture linked above, my sprite is cut on the boundaries of the sprite.
Is there any way to avoid this cut and draw the missing part ?
My shader :
Shader "Shader101/testShader"
{
Properties
{
_MainTex( "Texture", 2D ) = "white" {}
_DisplaceTex( "Displacement Texture", 2D ) = "white" {}
_Power("Power", Range(-0.4, 0.4)) = 0
}
SubShader
{
Tags
{
"Queue" = "Transparent"
}
Pass
{
Blend SrcAlpha OneMinusSrcAlpha
CGPROGRAM
#pragma vertex vert
#pragma fragment frag
#include "UnityCG.cginc"
struct appdata
{
float4 vertex : POSITION;
float2 uv : TEXCOORD0;
};
struct v2f
{
float4 vertex : SV_POSITION;
float2 uv : TEXCOORD0;
};
v2f vert (appdata v)
{
v2f o;
o.vertex = mul( UNITY_MATRIX_MVP, v.vertex);
o.uv = v.uv;
return o;
}
sampler2D _MainTex;
sampler2D _DisplaceTex;
float _Power;
fixed4 frag (v2f i) : SV_Target
{
float2 distuv = float2( 0, i.uv.y + _Time.x * 2 );
float2 wave = tex2D( _DisplaceTex, distuv * 5 ).xy;
wave = (( wave * 2 ) - 1) * _Power - 0.25;
float4 color = tex2D( _MainTex, i.uv + float2(wave.y, 0) );
return color;
}
ENDCG
}
}
}
Sorry for my poor english :|
Thank you
Take a look at your scene view with shaded wireframe mode on.
Here you should see that your shader does not change the vertex position, but only changes the rendered pixel colors in the fragment shader. Therefore it can not draw outside of the mesh.
The easiest way to fix it would be to modify the sprite texture with more transparent areas around your cat. (In Photoshop > Image > Canvas Size ...)
This is still a high search result without a satisfactory answer.
Two settings may help:
Extrude edges - Adds padding around the sprite
Full Rect - Required if you manually add extra space around a sprite, causes the wireframe to use all the empty space, instead of just the opaque areas.