I'm trying to program a compute shader Perlin noise implementation based on working C# code.
The problem is that I only get smooth dots.
Left: C# (working); right: compute shader,
with these values for both.
If I lower the frequency, the dots get bigger:
This is the code I'm using:
#pragma kernel CSMain
RWTexture2D<float> Result;
RWStructuredBuffer<float> resfloat;
float res;
float frequency;
float octaves;
float lacunarity;
float persistence;
StructuredBuffer<float3> gradients3D;
StructuredBuffer<int> hash;
float lerp(float v0, float v1, float t);
float Dot(float3 g, float x, float y, float z);
float Smooth(float t);
float Perlin3D(float3 v, float frequency);
float noise(float3 v, float frequency, int octaves, float lacunarity, float persistence);
int hashMask = 255;
int gradientsMask3D = 15;
[numthreads(8, 8, 1)]
void CSMain(uint3 id : SV_DispatchThreadID)
{
    float3 v = float3(id.x, id.y, id.z) / res;
    float h = noise(v, frequency, octaves, lacunarity, persistence);
    Result[id.xy] = h;
    resfloat[id.x + id.y * res] = h;
}
float lerp(float v0, float v1, float t) {
    return v0 + t * (v1 - v0);
}
float Smooth(float t) {
    return t * t * t * (t * (t * 6.0 - 15.0) + 10.0);
}
float Perlin3D(float3 v, float frequency) {
    v *= frequency;
    int ix0 = (int) floor(v.x);
    int iy0 = (int) floor(v.y);
    int iz0 = (int) floor(v.z);
    float tx0 = v.x - ix0;
    float ty0 = v.y - iy0;
    float tz0 = v.z - iz0;
    float tx1 = tx0 - 1.0;
    float ty1 = ty0 - 1.0;
    float tz1 = tz0 - 1.0;
    ix0 &= hashMask;
    iy0 &= hashMask;
    iz0 &= hashMask;
    int ix1 = ix0 + 1;
    int iy1 = iy0 + 1;
    int iz1 = iz0 + 1;
    int h0 = hash[ix0];
    int h1 = hash[ix1];
    int h00 = hash[h0 + iy0];
    int h10 = hash[h1 + iy0];
    int h01 = hash[h0 + iy1];
    int h11 = hash[h1 + iy1];
    float3 g000 = gradients3D[hash[h00 + iz0] & gradientsMask3D];
    float3 g100 = gradients3D[hash[h10 + iz0] & gradientsMask3D];
    float3 g010 = gradients3D[hash[h01 + iz0] & gradientsMask3D];
    float3 g110 = gradients3D[hash[h11 + iz0] & gradientsMask3D];
    float3 g001 = gradients3D[hash[h00 + iz1] & gradientsMask3D];
    float3 g101 = gradients3D[hash[h10 + iz1] & gradientsMask3D];
    float3 g011 = gradients3D[hash[h01 + iz1] & gradientsMask3D];
    float3 g111 = gradients3D[hash[h11 + iz1] & gradientsMask3D];
    float v000 = dot(g000, float3(tx0, ty0, tz0));
    float v100 = dot(g100, float3(tx1, ty0, tz0));
    float v010 = dot(g010, float3(tx0, ty1, tz0));
    float v110 = dot(g110, float3(tx1, ty1, tz0));
    float v001 = dot(g001, float3(tx0, ty0, tz1));
    float v101 = dot(g101, float3(tx1, ty0, tz1));
    float v011 = dot(g011, float3(tx0, ty1, tz1));
    float v111 = dot(g111, float3(tx1, ty1, tz1));
    float tx = Smooth(tx0);
    float ty = Smooth(ty0);
    float tz = Smooth(tz0);
    return lerp(
        lerp(lerp(v000, v100, tx), lerp(v010, v110, tx), ty),
        lerp(lerp(v001, v101, tx), lerp(v011, v111, tx), ty),
        tz);
}
float noise(float3 v, float frequency, int octaves, float lacunarity, float persistence)
{
    float sum = Perlin3D(v, frequency);
    float amplitude = 1;
    float range = 1;
    for (int o = 1; o < octaves; o++) {
        frequency *= lacunarity;
        amplitude *= persistence;
        range += amplitude;
        sum += Perlin3D(v, frequency) * amplitude;
    }
    return sum / range;
}
This is the working C# code.
And this is the C# code that calls the shader.
I already checked that the StructuredBuffers (gradients3D and hash) and the float parameters are loaded correctly.
Any ideas?
The problem was that, when the function Perlin3D used hashMask and gradientsMask3D, they were 0: non-static globals in HLSL are uniform constants, so their initializers are ignored unless the values are actually set from script (declaring them static const would also keep them as compile-time constants).
So I moved the definitions into the function:
float Perlin3D(float3 v, float frequency)
{
    int hashMask = 255;
    int gradientsMask3D = 15;
    v *= frequency;
    int ix0 = (int) floor(v.x);
    // ... (rest of the function unchanged)
}
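Alternatively (untested sketch), since non-static globals in a compute shader are uniforms, the two values could instead be set from the calling C# script before dispatch; computeShader here stands for whatever ComputeShader reference the caller holds:
// Alternative sketch: set the uniforms from C# instead of relying on
// initializers, which HLSL ignores for non-static globals.
computeShader.SetInt("hashMask", 255);
computeShader.SetInt("gradientsMask3D", 15);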
Context
I've been trying to create a buoyancy script that samples the position of a point, tests if it's under a certain level (the "water level"), and adds a force on that position based on depth. Separately, I worked on creating a nice looking water shader in Shadergraph, and had the bright idea to add in waves using the Simple Noise node + vertex displacement.
However, the only way (I could think of) to use those displaced values as the float "water level" was to rewrite the entire node tree in C#, and use that to sample the "water level" at that position.
Problem
For some reason, the final displaced mesh and the calculated positions are different, causing the buoyancy script to assume that the "water level" is higher or lower than it actually is. The difference isn't large, so I'm assuming there's an error somewhere in either the C# node-graph translation or the C# Simple Noise translation.
Is that correct? If so, where and what's my misunderstanding? If not, what else could have gone wrong?
Approach
Node Graph
Image of the node graph for the wave vector displacement
*If you need zoomed in pictures, let me know!
All things considered, it's relatively simple. It:
1. Takes the world position as a UV, and offsets and tiles it.
2. Feeds the UV to a Simple Noise node, and multiplies the noise by a strength.
3. Clamps the output.
4. Repeats 1-3 again and adds both results together for more detail.
5. Replaces the Y value of the vertex position with the combined wave value.
C# Script
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class WaveHeightCalculator : MonoBehaviour
{
    // Step 1
    [SerializeField] Material _waterMaterial;

    [Header("Waves")]
    [SerializeField] float _waveTiling;
    [SerializeField] float _waveOffset;
    [SerializeField] float _waveMin;
    [SerializeField] float _waveMax;

    [Header("Small Waves")]
    [SerializeField] float _wavesSmallScale;
    [SerializeField] float _wavesSmallStrength;
    [SerializeField] Vector2 _wavesSmallVelocity;

    [Header("Large Waves")]
    [SerializeField] float _wavesLargeScale;
    [SerializeField] float _wavesLargeStrength;
    [SerializeField] Vector2 _wavesLargeVelocity;

    // Step 2
    private void OnValidate()
    {
        _waterMaterial = GetComponent<Renderer>().sharedMaterial;
        SetVariables();
    }

    void SetVariables()
    {
        _waveTiling = _waterMaterial.GetFloat("_Wave_Tiling");
        _waveOffset = _waterMaterial.GetFloat("_Wave_Offset");
        _waveMin = _waterMaterial.GetFloat("_Wave_Min");
        _waveMax = _waterMaterial.GetFloat("_Wave_Max");
        _wavesSmallScale = _waterMaterial.GetFloat("_Waves_Small_Scale");
        _wavesSmallStrength = _waterMaterial.GetFloat("_Waves_Small_Strength");
        _wavesSmallVelocity = _waterMaterial.GetVector("_Waves_Small_Velocity");
        _wavesLargeScale = _waterMaterial.GetFloat("_Waves_Large_Scale");
        _wavesLargeStrength = _waterMaterial.GetFloat("_Waves_Large_Strength");
        _wavesLargeVelocity = _waterMaterial.GetVector("_Waves_Large_Velocity");
    }

    // Step 3
    public float GetWaveHeightAtPosition(Vector3 position)
    {
        Vector2 noiseMapUV = new Vector2(position.x, position.z) * _waveTiling;

        // Calculate Small Waves
        Vector2 wavesSmallUVOffset = (Time.time / 20) * _wavesSmallVelocity;
        float noiseValueAtUVPlusOffset = UnitySimpleNoiseAtUV(noiseMapUV + wavesSmallUVOffset, _wavesSmallScale);
        float wavesSmall = noiseValueAtUVPlusOffset * _wavesSmallStrength;

        // Calculate Large Waves
        Vector2 wavesLargeUVOffset = (Time.time / 20) * _wavesLargeVelocity;
        noiseValueAtUVPlusOffset = UnitySimpleNoiseAtUV(noiseMapUV + wavesLargeUVOffset, _wavesLargeScale);
        float wavesLarge = noiseValueAtUVPlusOffset * _wavesLargeStrength;

        // Combine
        float waveHeight = wavesSmall + wavesLarge;
        // Clamp
        waveHeight = Mathf.Clamp(waveHeight, _waveMin, _waveMax);
        // Offset
        waveHeight += _waveOffset;
        return waveHeight;
    }
In the C# script, a couple of things are going on. Here's my thought process for it:
1. It assigns the relevant material properties to member variables.
2. It sets those variables in the OnValidate() function.
3. It uses those variables to calculate the wave value, equivalent to the "water level" (see the usage sketch below).
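For context, here's a minimal sketch of how the buoyancy side consumes that value. The SimpleBuoyancy class, _buoyancyStrength, and the single sample point are illustrative placeholders, not my actual buoyancy script:
using UnityEngine;

// Illustrative only: a minimal consumer of GetWaveHeightAtPosition.
public class SimpleBuoyancy : MonoBehaviour
{
    [SerializeField] WaveHeightCalculator _waves;
    [SerializeField] float _buoyancyStrength = 10f;
    Rigidbody _rb;

    void Awake() => _rb = GetComponent<Rigidbody>();

    void FixedUpdate()
    {
        // Sample the script's notion of the "water level" at this point.
        float waterLevel = _waves.GetWaveHeightAtPosition(transform.position);
        float depth = waterLevel - transform.position.y;
        if (depth > 0f) // below the displaced surface: push up based on depth
            _rb.AddForceAtPosition(Vector3.up * depth * _buoyancyStrength, transform.position);
    }
}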
The script also contains and relies on my best attempt at translating the Simple Noise node from "Show Generated Code", which looked like this.
Generated Code
inline float Unity_SimpleNoise_RandomValue_float (float2 uv)
{
    float angle = dot(uv, float2(12.9898, 78.233));
    #if defined(SHADER_API_MOBILE) && (defined(SHADER_API_GLES) || defined(SHADER_API_GLES3) || defined(SHADER_API_VULKAN))
        // 'sin()' has bad precision on Mali GPUs for inputs > 10000
        angle = fmod(angle, TWO_PI); // Avoid large inputs to sin()
    #endif
    return frac(sin(angle)*43758.5453);
}
inline float Unity_SimpleNnoise_Interpolate_float (float a, float b, float t)
{
    return (1.0-t)*a + (t*b);
}
inline float Unity_SimpleNoise_ValueNoise_float (float2 uv)
{
    float2 i = floor(uv);
    float2 f = frac(uv);
    f = f * f * (3.0 - 2.0 * f);
    uv = abs(frac(uv) - 0.5);
    float2 c0 = i + float2(0.0, 0.0);
    float2 c1 = i + float2(1.0, 0.0);
    float2 c2 = i + float2(0.0, 1.0);
    float2 c3 = i + float2(1.0, 1.0);
    float r0 = Unity_SimpleNoise_RandomValue_float(c0);
    float r1 = Unity_SimpleNoise_RandomValue_float(c1);
    float r2 = Unity_SimpleNoise_RandomValue_float(c2);
    float r3 = Unity_SimpleNoise_RandomValue_float(c3);
    float bottomOfGrid = Unity_SimpleNnoise_Interpolate_float(r0, r1, f.x);
    float topOfGrid = Unity_SimpleNnoise_Interpolate_float(r2, r3, f.x);
    float t = Unity_SimpleNnoise_Interpolate_float(bottomOfGrid, topOfGrid, f.y);
    return t;
}
void Unity_SimpleNoise_float(float2 UV, float Scale, out float Out)
{
    float t = 0.0;
    float freq = pow(2.0, float(0));
    float amp = pow(0.5, float(3-0));
    t += Unity_SimpleNoise_ValueNoise_float(float2(UV.x*Scale/freq, UV.y*Scale/freq))*amp;
    freq = pow(2.0, float(1));
    amp = pow(0.5, float(3-1));
    t += Unity_SimpleNoise_ValueNoise_float(float2(UV.x*Scale/freq, UV.y*Scale/freq))*amp;
    freq = pow(2.0, float(2));
    amp = pow(0.5, float(3-2));
    t += Unity_SimpleNoise_ValueNoise_float(float2(UV.x*Scale/freq, UV.y*Scale/freq))*amp;
    Out = t;
}
Translated Code
float float_frac(float x) { return x - Mathf.Floor(x);}
Vector2 frac(Vector2 x) { return x - new Vector2(Mathf.Floor(x.x), Mathf.Floor(x.y));}
float sin(float x) { return Mathf.Sin(x);}
float dot(Vector2 a, Vector2 b) { return a.x * b.x + a.y * b.y;}
float float_floor(float x) { return Mathf.Floor(x);}
Vector2 floor(Vector2 x) { return new Vector2(Mathf.Floor(x.x), Mathf.Floor(x.y));}
float float_abs(float x) { return Mathf.Abs(x);}
Vector2 abs(Vector2 x) { return new Vector2(Mathf.Abs(x.x), Mathf.Abs(x.y));}
float pow (float x, float y) { return Mathf.Pow(x, y);}
float Unity_SimpleNoise_RandomValue_float (Vector2 uv)
{
    float angle = dot(uv, new Vector2(12.9898f, 78.233f));
    return float_frac(sin(angle) * 43758.5453f);
}
float Unity_SimpleNnoise_Interpolate_float (float a, float b, float t)
{
    return (1.0f - t) * a + (t * b);
}
float Unity_SimpleNoise_ValueNoise_float (Vector2 uv)
{
    Vector2 i = floor(uv);
    Vector2 f = frac(uv);
    f = (f * f) * (new Vector2(3.0f, 3.0f) - new Vector2(2.0f, 2.0f) * f);
    uv = abs(frac(uv) - new Vector2(0.5f, 0.5f));
    Vector2 c0 = i + new Vector2(0.0f, 0.0f);
    Vector2 c1 = i + new Vector2(1.0f, 0.0f);
    Vector2 c2 = i + new Vector2(0.0f, 1.0f);
    Vector2 c3 = i + new Vector2(1.0f, 1.0f);
    float r0 = Unity_SimpleNoise_RandomValue_float(c0);
    float r1 = Unity_SimpleNoise_RandomValue_float(c1);
    float r2 = Unity_SimpleNoise_RandomValue_float(c2);
    float r3 = Unity_SimpleNoise_RandomValue_float(c3);
    float bottomOfGrid = Unity_SimpleNnoise_Interpolate_float(r0, r1, f.x);
    float topOfGrid = Unity_SimpleNnoise_Interpolate_float(r2, r3, f.x);
    float t = Unity_SimpleNnoise_Interpolate_float(bottomOfGrid, topOfGrid, f.y);
    return t;
}
float UnitySimpleNoiseAtUV(Vector2 UV, float Scale)
{
    float t = 0.0f;
    float freq = pow(2.0f, 0);
    float amp = pow(0.5f, 3 - 0);
    t += Unity_SimpleNoise_ValueNoise_float(new Vector2(UV.x * Scale / freq, UV.y * Scale / freq)) * amp;
    freq = pow(2.0f, 1);
    amp = pow(0.5f, 3 - 1);
    t += Unity_SimpleNoise_ValueNoise_float(new Vector2(UV.x * Scale / freq, UV.y * Scale / freq)) * amp;
    freq = pow(2.0f, 2);
    amp = pow(0.5f, 3 - 2);
    t += Unity_SimpleNoise_ValueNoise_float(new Vector2(UV.x * Scale / freq, UV.y * Scale / freq)) * amp;
    return t;
}
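To sanity-check the translation in isolation, I can log it at a few fixed UVs and compare the values against the Simple Noise node preview. This is a throwaway helper; the UVs are arbitrary:
// Throwaway check: compare the translated noise against the Shader Graph
// Simple Noise node preview at the same UVs and scale.
void LogNoiseSamples()
{
    foreach (var uv in new[] { new Vector2(0.1f, 0.2f), new Vector2(0.5f, 0.5f) })
        Debug.Log($"{uv} -> {UnitySimpleNoiseAtUV(uv, _wavesSmallScale)}");
}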
I followed an online tutorial that basically draws a shape on a plane. I want to make only the shape visible and the plane's background invisible, so that I can put the shape onto another plane instead.
I'm extremely new to Three.js and GLSL shaders, so I'm completely lost here; any help is welcome, thank you.
My Fragment Shader:
uniform float time;
uniform float progress;
uniform vec2 mouse;
uniform sampler2D matcap;
uniform vec4 resolution;
varying vec2 vUv;
varying vec3 vPosition;
float PI = 3.141592653589793238;

mat4 rotationMatrix(vec3 axis, float angle) {
    axis = normalize(axis);
    float s = sin(angle);
    float c = cos(angle);
    float oc = 1.0 - c;
    return mat4(oc * axis.x * axis.x + c, oc * axis.x * axis.y - axis.z * s, oc * axis.z * axis.x + axis.y * s, 0.0,
                oc * axis.x * axis.y + axis.z * s, oc * axis.y * axis.y + c, oc * axis.y * axis.z - axis.x * s, 0.0,
                oc * axis.z * axis.x - axis.y * s, oc * axis.y * axis.z + axis.x * s, oc * axis.z * axis.z + c, 0.0,
                0.0, 0.0, 0.0, 1.0);
}

vec2 getmatcap(vec3 eye, vec3 normal) {
    vec3 reflected = reflect(eye, normal);
    float m = 2.8284271247461903 * sqrt(reflected.z + 1.0);
    return reflected.xy / m + 0.5;
}

vec3 rotate(vec3 v, vec3 axis, float angle) {
    mat4 m = rotationMatrix(axis, angle);
    return (m * vec4(v, 1.0)).xyz;
}

float smin(float a, float b, float k) {
    float h = clamp(0.5 + 0.5 * (b - a) / k, 0.0, 1.0);
    return mix(b, a, h) - k * h * (1.0 - h);
}

float sdSphere(vec3 p, float r) {
    return length(p) - r;
}

float sdBox(vec3 p, vec3 b) {
    vec3 q = abs(p) - b;
    return length(max(q, 0.0)) + min(max(q.x, max(q.y, q.z)), 0.);
}

float rand(vec2 co) {
    return fract(sin(dot(co.xy, vec2(12.9898, 78.233))) * 43758.5453);
}

float sdf(vec3 p) {
    vec3 p1 = rotate(p, vec3(1.), time / 5.);
    float box = smin(sdBox(p1, vec3(0.2)), sdSphere(p, 0.2), 0.3);
    float realsphere = sdSphere(p1, 0.1);
    float final = mix(box, realsphere, progress);
    for (float i = 0.; i < 10.; i++) {
        float randOffset = rand(vec2(i, 0.));
        float progr = 1. - fract(time / 3. + randOffset * 3.);
        vec3 pos = vec3(sin(randOffset * 2. * PI), cos(randOffset * 2. * PI), 0.);
        float gotoCenter = sdSphere(p - pos * progr, 0.1);
        final = smin(final, gotoCenter, 0.1);
    }
    float mouseSphere = sdSphere(p - vec3(mouse * 2., 0.), 0.1);
    return smin(final, mouseSphere, 0.1);
}

vec3 calcNormal(in vec3 p) {
    const float eps = 0.0001;
    const vec2 h = vec2(eps, 0);
    return normalize(vec3(sdf(p + h.xyy) - sdf(p - h.xyy),
                          sdf(p + h.yxy) - sdf(p - h.yxy),
                          sdf(p + h.yyx) - sdf(p - h.yyx)));
}

void main() {
    float dist = length(vUv - vec2(0.5));
    vec3 bg = mix(vec3(0.3), vec3(0.0), dist);
    vec3 camPos = vec3(0., 0., 2.);
    vec3 ray = normalize(vec3(vUv - vec2(0.5), -1));
    vec3 rayPos = camPos;
    float t = 0.;
    float tMax = 5.;
    for (int i = 0; i < 256; i++) {
        vec3 pos = camPos + t * ray;
        float h = sdf(pos);
        if (h < 0.0001 || t > tMax) break;
        t += h;
    }
    vec3 color = bg;
    if (t < tMax) {
        vec3 pos = camPos + t * ray;
        vec3 normal = calcNormal(pos);
        float diff = dot(vec3(1.), normal);
        vec2 matcapUV = getmatcap(ray, normal);
        color = texture2D(matcap, matcapUV).rgb;
        float fresnel = pow(1. + dot(ray, normal), 3.);
        color = mix(color, bg, fresnel);
    }
    gl_FragColor = vec4(color, 1.);
}
The problem is that when I try to convert a height map to a normal map, the results are wrong. For some reason there appear to be three light sources emitting from the top (green), right (red), and left (blue) in the texture.
This is the GeoMath.hlsl code that I am using:
static const float PI = 3.141592653589793238462643383279;

float2 longitudeLatitudeToUV(float2 longLat) {
    float longitude = longLat[0];
    float latitude = longLat[1];
    float u = longitude / (2 * PI) + 0.5;
    float v = latitude / PI + 0.5;
    return float2(u, v);
}

float3 longitudeLatitudeToPoint(float2 longLat) {
    float longitude = longLat[0];
    float latitude = longLat[1];
    float y = sin(latitude);
    float r = cos(latitude);
    float x = sin(longitude) * r;
    float z = -cos(longitude) * r;
    return float3(x, y, z);
}

float2 uvToLongitudeLatitude(float2 uv) {
    float longitude = (uv.x - 0.5) * (2 * PI);
    float latitude = (uv.y - 0.5) * PI;
    return float2(longitude, latitude);
}

float2 pointToLongitudeLatitude(float3 p) {
    float longitude = atan2(p.x, p.z);
    float latitude = asin(p.y);
    return float2(longitude, latitude);
}

float2 pointToUV(float3 p) {
    p = normalize(p);
    return longitudeLatitudeToUV(pointToLongitudeLatitude(p));
}
This is the compute shader I am using to convert the height map into a normal map:
#pragma kernel CSMain
#include "GeoMath.hlsl"

Texture2D<float> _HeightMap;
RWTexture2D<float4> _NormalMap;
int _TextureSize_Width;
int _TextureSize_Height;
float _WorldRadius;
float _HeightMultiplier;

float3 CalculateWorldPoint(uint2 texCoord)
{
    float2 uv = texCoord / float2(_TextureSize_Width - 1, _TextureSize_Height - 1);
    float2 longLat = uvToLongitudeLatitude(uv);
    float3 spherePoint = longitudeLatitudeToPoint(longLat);
    float height01 = _HeightMap[texCoord].r + 1.0;
    float worldHeight = _WorldRadius + height01 * _HeightMultiplier;
    return spherePoint * worldHeight;
}

uint2 WrapIndex(uint2 texCoord)
{
    texCoord.x = (texCoord.x + _TextureSize_Width) % _TextureSize_Width;
    texCoord.y = max(min(_TextureSize_Height - 1, texCoord.y), 0);
    return texCoord;
}

[numthreads(8,8,1)]
void CSMain (uint3 id : SV_DispatchThreadID)
{
    float3 posNorth = CalculateWorldPoint(WrapIndex(id.xy + uint2(0, 1)));
    float3 posSouth = CalculateWorldPoint(WrapIndex(id.xy + uint2(0, -1)));
    float3 posEast = CalculateWorldPoint(WrapIndex(id.xy + uint2(1, 0)));
    float3 posWest = CalculateWorldPoint(WrapIndex(id.xy + uint2(-1, 0)));
    float3 dirNorth = normalize(posNorth - posSouth);
    float3 dirEast = normalize(posEast - posWest);
    float3 normalVector = normalize(cross(dirNorth, dirEast));
    _NormalMap[id.xy] = float4(normalVector, 1.0);
}
And the result I am getting is shown below: the height map (top) and the normal map generated from the code above (bottom).
I believe that you are trying to get object-space normals, but a tiny detail is missing: each axis of a normalized vector ranges from -1 to 1, while pixel values range from 0 to 1. You just need to remap the range accordingly. This line roughly fixes the problem:
_NormalMap[id.xy] = float4(normalVector / 2 + float3(0.5, 0.5, 0.5), 1.0);
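Whatever later samples this texture then has to invert that remap. In C# terms the convention is (just a sketch of the convention, not code from this project):
// Sketch of the remap convention (not project code):
// pack:   [-1, 1] -> [0, 1]  (what the compute shader writes)
// unpack: [0, 1] -> [-1, 1]  (what a consumer of the texture must apply)
Vector3 Pack(Vector3 n) => n * 0.5f + new Vector3(0.5f, 0.5f, 0.5f);
Vector3 Unpack(Vector3 c) => c * 2f - Vector3.one;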
Result
I need a little help.
I have this Perlin noise function, but I don't know how to properly create offsets.
I am using this for infinite terrain generation, and when I use this script the noise values of individual chunks don't fit together properly, which creates holes.
Is there a way to fix this?
public float[,] GenerateNoise(int chunkSize, int octaves, string seed, float noiseScale, float persistence, float lacunarity, Vector2 offset)
{
    if (noiseScale <= 0)
    {
        noiseScale = 0.0001f;
    }

    float halfWidth = chunkSize / 2f;
    float halfHeight = chunkSize / 2f;
    float[,] noiseMap = new float[chunkSize, chunkSize];
    System.Random rand = new System.Random(seed.GetHashCode());

    // Octaves offset
    Vector2[] octavesOffset = new Vector2[octaves];
    for (int i = 0; i < octaves; i++)
    {
        float offset_X = rand.Next(-100000, 100000) + offset.x;
        float offset_Y = rand.Next(-100000, 100000) + offset.y;
        octavesOffset[i] = new Vector2(offset_X / chunkSize, offset_Y / chunkSize);
    }

    for (int x = 0; x < chunkSize; x++)
    {
        for (int y = 0; y < chunkSize; y++)
        {
            float amplitude = 1;
            float frequency = 1;
            float noiseHeight = 0;
            float superpositionCompensation = 0;
            for (int i = 0; i < octaves; i++)
            {
                float sampleX = (x - halfWidth) / noiseScale * frequency + octavesOffset[i].x * frequency;
                float sampleY = (y - halfHeight) / noiseScale * frequency + octavesOffset[i].y * frequency;
                float noiseValue = Mathf.PerlinNoise(sampleX, sampleY);
                noiseHeight += noiseValue * amplitude;
                noiseHeight -= superpositionCompensation;
                amplitude *= persistence;
                frequency *= lacunarity;
                superpositionCompensation = amplitude / 2;
            }
            noiseMap[x, y] = Mathf.Clamp01(noiseHeight);
        }
    }
    return noiseMap;
}
It is quite simple, actually: just add the chunk's x,y coordinates to Mathf.PerlinNoise. Taking your code as an example, you can:
1. Pass chunkPos as an argument to it:
public float[,] GenerateNoise(Vector2 chunkPos, int chunkSize, int octaves, string seed, float noiseScale, float persistence, float lacunarity, Vector2 offset)
2. Add it to the Mathf.PerlinNoise invocation:
float noiseValue = Mathf.PerlinNoise(sampleX + chunkPos.x, sampleY + chunkPos.y);
Then make sure to generate each chunk with an appropriate chunkPos, where chunkPos can be its transform.position or whatever coordinates you have.
That's it.
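For illustration, here is a minimal self-contained version of the idea; how chunkPos is scaled has to match the noiseScale/frequency conventions of your generator:
// Minimal sketch: sample in world space so adjacent chunks line up.
float[,] GenerateChunk(Vector2 chunkPos, int chunkSize, float noiseScale)
{
    var map = new float[chunkSize, chunkSize];
    for (int x = 0; x < chunkSize; x++)
        for (int y = 0; y < chunkSize; y++)
        {
            // World-space sample position: chunk origin plus local cell.
            float sampleX = (chunkPos.x + x) / noiseScale;
            float sampleY = (chunkPos.y + y) / noiseScale;
            map[x, y] = Mathf.PerlinNoise(sampleX, sampleY);
        }
    return map;
}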
There is code for drawing a circle with a LineRenderer,
but I want to draw multiple circles with different radii. I used a for loop, but I get one circle instead of multiple.
public float ThetaScale = 0.01f;
public float radius = 3f;
private int Size;
private LineRenderer LineDrawer;
private float Theta = 0f;

void Start ()
{
    LineDrawer = GetComponent<LineRenderer>();
}

void Update ()
{
    Theta = 0f;
    Size = (int)((1f / ThetaScale) + 1f);
    LineDrawer.SetVertexCount(Size);
    for (int l = 0; l < 5; l++)
    {
        for (int i = 0; i < Size; i++)
        {
            Theta += (2.0f * Mathf.PI * ThetaScale);
            float x = l * radius * Mathf.Cos(Theta);
            float y = l * radius * Mathf.Sin(Theta);
            LineDrawer.SetPosition(i, new Vector3(x, 0, y));
        }
    }
}
In every iteration of the outer loop you overwrite the same position indices in the same LineRenderer, so you will always only get the last circle.
Note that it is also quite expensive to use SetPosition repeatedly. As the API documentation says, you should rather work on an array and then use SetPositions to assign all positions at once.
One thing is a bit unclear though: with one single LineRenderer you won't get independent circles; they will always be connected at some point. Otherwise you would need 5 separate LineRenderer instances.
Option A: 5 circles, but connected to each other since they are part of a single LineRenderer
void Start ()
{
    LineDrawer = GetComponent<LineRenderer>();
    LineDrawer.loop = false;
    Theta = 0f;
    // Use one position more to close each circle
    Size = (int)((1f / ThetaScale) + 1f) + 1;
    LineDrawer.positionCount = 5 * Size;
    var positions = new Vector3[5 * Size];
    for (int l = 0; l < 5; l++)
    {
        for (int i = 0; i < Size; i++)
        {
            Theta += (2.0f * Mathf.PI * ThetaScale);
            float x = l * radius * Mathf.Cos(Theta);
            float y = l * radius * Mathf.Sin(Theta);
            // Each circle occupies its own block of Size positions
            positions[l * Size + i] = new Vector3(x, 0, y);
        }
    }
    LineDrawer.SetPositions(positions);
}
Option B: 5 separated circles in 5 separated LineRenderers
// Drag 5 individual LineRenderers here via the Inspector
public LineRenderer[] lines = new LineRenderer[5];

void Start ()
{
    for (int l = 0; l < lines.Length; l++)
    {
        var line = lines[l];
        line.loop = true;
        Theta = 0f;
        Size = (int)((1f / ThetaScale) + 1f);
        line.positionCount = Size;
        var positions = new Vector3[Size];
        for (int i = 0; i < Size; i++)
        {
            Theta += (2.0f * Mathf.PI * ThetaScale);
            float x = l * radius * Mathf.Cos(Theta);
            float y = l * radius * Mathf.Sin(Theta);
            positions[i] = new Vector3(x, 0, y);
        }
        line.SetPositions(positions);
    }
}
You missed a few details here and there. Here, this will work:
using UnityEngine;

[ExecuteAlways]
[RequireComponent( typeof(LineRenderer) )]
public class CircularBehaviour : MonoBehaviour
{
    [SerializeField][Min(3)] int _numSegments = 16;
    [SerializeField][Min(1)] int _numCircles = 5;
    [SerializeField] float _radius = 3f;

    LineRenderer _lineRenderer;

    void Awake ()
    {
        _lineRenderer = GetComponent<LineRenderer>();
        _lineRenderer.loop = false;
        _lineRenderer.useWorldSpace = false;
    }

    void Update ()
    {
        const float TAU = 2f * Mathf.PI;
        float theta = TAU / (float)_numSegments;
        int numVertices = _numSegments + 1;
        _lineRenderer.positionCount = numVertices * _numCircles;
        int vert = 0;
        for( int l=1 ; l<=_numCircles ; l++ )
        {
            float r = _radius * (float)l;
            for( int i=0 ; i<numVertices ; i++ )
            {
                float f = theta * (float)i;
                Vector3 v = new Vector3{ x=Mathf.Cos(f) , y=Mathf.Sin(f) } * r;
                _lineRenderer.SetPosition( vert++ , v );
            }
        }
    }
}
But, as @derHugo explained, this is not exactly what you're looking for, as all the circles will be drawn connected.