modulo to cycle through a compute buffer not working as expected - unity3d

The Setup
In my compute shader I have a StructuredBuffer that stores a number of colors, along with an int variable holding the total color count (_colres). From another script, a node tree is dispatched into the shader every frame. The number of nodes changes dynamically, so the points buffer containing the nodes has a fixed size of 8192, which the node count never exceeds.
The Problem
When I now try to draw the points stored in the points buffer, oddly enough only every third color is displayed, starting at index [0] (tested with up to 12 colors -> [0], [3], [6], [9]).
Result[pointsBuffer[id.x].xy] = colorsBuffer[id.x % _colres];
What I tried
I used the fmod() function instead but got the same result. Individually targeting stored colors by hard-coding the index works, so my guess is that the colors buffer is not the problem. Maybe it has something to do with all the empty entries in the points buffer, but I couldn't figure it out.
The Question(s)
Is there a fundamental problem I am overlooking?
Is there some other simple way to cycle through the indices of my colors buffer that works in this scenario?
Detailed Information
System:
Unity Version 2021.2.12f1 using HDRP on macOS Monterey 12.2.1
Compute Shader
#pragma kernel DrawPoints
// texture
shared RWTexture2D<float4> Result;
int _texres;
int _colres;
// buffer
StructuredBuffer<float2> pointsBuffer;
StructuredBuffer<float4> colorsBuffer;
[numthreads(64,1,1)]
void DrawPoints (uint3 id : SV_DispatchThreadID)
{
if ((pointsBuffer[id.x].x * pointsBuffer[id.x].y) > 0) Result[pointsBuffer[id.x].xy] = colorsBuffer[id.x % _colres];
}
C# Setup
public differentialGrowth diffGrowth;
int texResolution = 4096;
int colorAmount = 12;
RenderTexture settingRef;
Material target;
ComputeShader shader;
RenderTexture outputTexture;
ComputeBuffer pointsBuffer;
ComputeBuffer colorsBuffer;
int pointsHandle;
void Start()
{
outputTexture = new RenderTexture(settingRef);
outputTexture.enableRandomWrite = true;
outputTexture.Create();
// INIT
pointsHandle = shader.FindKernel("DrawPoints");
shader.SetInt("_texres", texResolution);
shader.SetInt("_colres", colorAmount);
int stride = (3) * 4; // every component as a float (3) * 4 bytes per float
pointsBuffer = new ComputeBuffer(8192, stride);
stride = (4) * 4;
colorsBuffer = new ComputeBuffer(colorAmount, stride);
shader.SetTexture( pointsHandle, "Result", outputTexture );
target.SetTexture("_MainTex", outputTexture);
Color[] testColors = new Color[colorAmount];
testColors[0] = new Color(1, 0, 0, 0); //red _ yes
testColors[1] = new Color(0, 1, 0, 0); //green
testColors[2] = new Color(0, 0, 1, 0); //blue
testColors[3] = new Color(1, 1, 0, 0); //yellow _yes
testColors[4] = new Color(0, 1, 1, 0); //cyan
testColors[5] = new Color(1, 0, 1, 0); //magenta
testColors[6] = new Color(0.5f, 0, 1, 0); //mix6 _yes
testColors[7] = new Color(1, 0.5f, 1, 0); //mix7
testColors[8] = new Color(0.5f, 0, 1, 0); //mix8
testColors[9] = new Color(0.5f, 0.5f, 1, 0); //mix9 _yes
testColors[10] = new Color(0.5f, 0.5f, 1, 0); //mix10
testColors[11] = new Color(0.5f, 0.5f, 1, 0); //mix11
colorsBuffer.SetData(testColors); // upload the palette; without this the buffer stays empty
}
void Update()
{
pointsBuffer.SetData(diffGrowth.nodes.Points);
shader.SetBuffer(pointsHandle, "colorsBuffer", colorsBuffer);
shader.SetBuffer(pointsHandle, "pointsBuffer", pointsBuffer);
shader.Dispatch(pointsHandle, 128, 1, 1);
}
private void OnDestroy()
{
if (pointsBuffer != null) pointsBuffer.Dispose();
if (colorsBuffer != null) colorsBuffer.Dispose();
}
}
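A note on a likely cause, since it isn't resolved above: the C# side creates pointsBuffer with a 12-byte stride (three floats per node), while the shader declares StructuredBuffer<float2> (8 bytes per element). If the nodes really are three floats each, GPU element i covers floats 2i and 2i+1 while point j starts at float 3j (behavior with mismatched strides isn't guaranteed, but a packed reinterpretation like this is what typically happens). The two only line up for every second point, at thread indices stepping by 3 - which matches the observed [0], [3], [6], [9] color pattern exactly. The modulo expression itself is fine once the indices line up. A minimal sketch of a consistent setup, assuming the node positions can be uploaded as Vector2 (nodePositions2D is a hypothetical name):

// Make the C# stride match the HLSL element type: StructuredBuffer<float2>
// is 8 bytes per element, so upload Vector2 data (or instead declare
// StructuredBuffer<float3> in the shader and keep the 12-byte stride).
int stride = 2 * sizeof(float); // float2 = 8 bytes
pointsBuffer = new ComputeBuffer(8192, stride);
pointsBuffer.SetData(nodePositions2D); // nodePositions2D : Vector2[] (hypothetical)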

Related

Unity: How to Create a projection such that the local coordinates match the monitor coordinates

I'm learning about mesh rendering in Unity.
I followed the documentation and was able to draw a blue quad on the screen.
However, I can't figure out how to render things such that the entire monitor is used.
I think what I'm missing is setting some sort of projection so that this coordinate system matches the monitor. But how can I do that? Ideally I would like to do:
x_start = 0;
x_end = W;
y_start = 0;
y_end = H;
And have this quad cover the entire screen.
Here is the code:
using System.IO;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.XR;
public class MeshRendererTest : MonoBehaviour
{
// Start is called before the first frame update
static Material mat;
float z;
float W;
float H;
Matrix4x4 projectionMatrix;
void Start(){
W = 2480.0f;
H = 2416.0f;
//projectionMatrix = Matrix4x4.Ortho(0, (int)W, (int)H, 0, -1, 100);
Shader shader = Shader.Find("Hidden/Internal-Colored");
//Shader shader2 = Shader.Find("Standard");
//Shader shader2 = Shader.Find("Unlit/UnlitAlphaWithFade");
mat = new Material(shader);
mat.hideFlags = HideFlags.HideAndDontSave;
//texMat.hideFlags = HideFlags.HideAndDontSave;
// Turn backface culling off
mat.SetInt("_Cull", (int)UnityEngine.Rendering.CullMode.Off);
//texMat.SetInt("_Cull", (int)UnityEngine.Rendering.CullMode.Off);
// Turn off depth writes
mat.SetInt("_ZWrite", 0);
//texMat.SetInt("_ZWrite", 0);
z = 999.0f;
mat.SetColor("_Color", Color.blue);
mat.SetPass(0);
MeshRenderer meshRenderer = gameObject.AddComponent<MeshRenderer>();
if (meshRenderer == null){
Debug.Log("Mesh Renderer is NUll");
return;
}
//meshRenderer.sharedMaterial = new Material(Shader.Find("Standard"));
meshRenderer.sharedMaterial = mat;
MeshFilter meshFilter = gameObject.AddComponent<MeshFilter>();
if (meshFilter == null){
Debug.Log("Mesh Filter is NUll");
return;
}
Mesh mesh = new Mesh();
float x_start = 0;
float x_end = x_start + 100;
float y_start = 0;
float y_end = y_start + 600;
Vector3[] vertices = new Vector3[4]
{
new Vector3(x_start, y_start, z),
new Vector3(x_end, y_start, z),
new Vector3(x_start, y_end, z),
new Vector3(x_end, y_end, z)
};
mesh.vertices = vertices;
int[] tris = new int[6]
{
// lower left triangle
0, 2, 1,
// upper right triangle
2, 3, 1
};
mesh.triangles = tris;
Vector3[] normals = new Vector3[4]
{
-Vector3.forward,
-Vector3.forward,
-Vector3.forward,
-Vector3.forward
};
mesh.normals = normals;
Vector2[] uv = new Vector2[4]
{
new Vector2(0, 0),
new Vector2(1, 0),
new Vector2(0, 1),
new Vector2(1, 1)
};
mesh.uv = uv;
meshFilter.mesh = mesh;
}
void OnRenderObject(){
TestStuff();
}
void TestStuff(){
Camera.main.ResetProjectionMatrix();
Matrix4x4 newProj = Matrix4x4.identity;
newProj = newProj * transform.localToWorldMatrix;
newProj = newProj * Camera.main.projectionMatrix;
newProj = newProj * projectionMatrix;
Camera.main.projectionMatrix = newProj;
}
}
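For reference, a simpler route than composing matrices in OnRenderObject is to give the camera an orthographic projection that maps pixel coordinates directly to the screen. A minimal sketch, assuming the camera sits at the origin with its default orientation; the near/far values are assumptions and need to be chosen so the quad's z (999 above) falls inside the clip range:

using UnityEngine;

public class PixelSpaceCamera : MonoBehaviour
{
    void Start()
    {
        // Map x in [0, Screen.width] and y in [0, Screen.height] to the viewport:
        // arguments are left, right, bottom, top, zNear, zFar.
        Camera.main.projectionMatrix =
            Matrix4x4.Ortho(0, Screen.width, 0, Screen.height, -1, 1000);
    }
}

With this projection, a quad with corners (0, 0) and (W, H) covers the whole screen as long as W and H are the actual screen dimensions rather than hard-coded values.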

Bad usage of Physics.OverlapBox

I don't understand the usage of the method Physics.OverlapBox.
I want to put ten walls on a 4x4 plane in my scene. The width and location of each wall are calculated randomly. On odd iterations of the creation loop the wall is rotated 90 degrees.
The walls placed in the scene should not collide with other walls... but it's not working.
void Reset()
{
int walls = 10;
for (int w = 0; w < walls; w++)
{
for (int i = 0; i < 5; i++)
{
float x = Random.Range(-20f, 20f);
float z = Random.Range(-20f, 20f);
Vector3 center = new Vector3(x, 1.51f, z);
int scalex = Random.Range(4, 13);
Quaternion quaternion = Quaternion.identity;
if (w % 2 == 1)
quaternion = Quaternion.Euler(0, 0, 0);
else
quaternion = Quaternion.Euler(0, 90, 0);
Collider[] colliders = Physics.OverlapBox(center, new Vector3(scalex, 3, 1) / 2, quaternion);
Debug.Log(colliders.Length);
if (colliders.Length == 0)
{
GameObject wall = GameObject.CreatePrimitive(PrimitiveType.Cube);
wall.transform.position = center;
wall.transform.localScale = new Vector3(scalex, 3, 1);
wall.transform.rotation = quaternion;
wall.tag = "wall";
break;
}
}
}
}
After your comment I think I now know what the issue is:
The physics engine simply doesn't "know" your walls' colliders yet, since the physics engine is only updated in the next FixedUpdate.
You might want to call Physics.Simulate after each wall in order to manually trigger a physics update.
The docs are a bit unclear on whether it is necessary, but you might have to disable Physics.autoSimulation before the loop and turn it back on after you are finished with the wall creation.
The solution proposed by derHugo (many thanks) works from the Start method. I added rotation to the walls. No collisions.
The code:
public void Start()
{
Physics.autoSimulation = false;
for (int i = 0; i < 200; i++)
{
createWall();
Physics.Simulate(Time.fixedDeltaTime);
}
Physics.autoSimulation = true;
}
void createWall()
{
float x = Random.Range(-20f, 20f);
float z = Random.Range(-20f, 20f);
Vector3 position = new Vector3(x, 1.51f, z);
Quaternion rotation = Quaternion.Euler(0, Random.Range(0,360), 0);
int scalex = Random.Range(2, 5);
Collider[] colliders = Physics.OverlapBox(position, new Vector3(scalex, 1, 1)/2, rotation);
if (colliders.Length == 0)
{
GameObject wall = GameObject.CreatePrimitive(PrimitiveType.Cube);
wall.transform.localPosition = position;
wall.transform.localScale = new Vector3(scalex, 3, 1);
wall.transform.rotation = rotation;
wall.tag = "wall";
}
}
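One detail worth double-checking, reusing the names from the snippet above: Physics.OverlapBox takes half-extents, so the test volume should be half of the wall's final localScale on every axis. In createWall the y half-extent is 0.5 while the spawned wall is 3 units tall, so overlaps near the top and bottom of a wall could be missed. A sketch with matched extents:

// Match the overlap test volume to the cube that will actually be spawned:
// OverlapBox expects half-extents, i.e. localScale / 2 on every axis.
Vector3 wallScale = new Vector3(scalex, 3, 1);
Collider[] colliders = Physics.OverlapBox(position, wallScale / 2, rotation);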

How to release memory in Unity?

When I create a mesh and textures in Unity on each frame (30 fps), it feels like Unity doesn't release this data from memory after use.
Here is my code:
private bool UpdateFrame(int frameIdx)
{
bool result = true;
int readyBuffSize = DecoderAPI.stream_get_ready_buffer_size(m_stream);
if (m_currMeshFrameIndex != frameIdx
&& readyBuffSize > 0)
{
m_currMeshFrameIndex = frameIdx;
IntPtr frame = DecoderAPI.stream_get_next_frame_obj(m_stream);
if (frame == IntPtr.Zero)
{
result = false;
}
else
{
long sequentialFrameIdx = DecoderAPI.get_sequential_number(frame);
DebugMethod("UNITY UpdateFrame", $"readyBuffSize :: {readyBuffSize}");
DebugMethod("UNITY UpdateFrame", $"sequentialFrameIdx :: {sequentialFrameIdx}");
Mesh releaseFormer = m_meshFilter.mesh;
m_meshFilter.mesh = CreateMesh(frame);
Texture2D texture = CreateTexture(frame);
m_meshRenderer.material.SetTexture("_MainTex", texture);
DecoderAPI.stream_release_frame_obj(m_stream, frame);
Destroy(releaseFormer); // does not seem to help: even when there are no more allocations in C++ the process grows endlessly
}
}
return result;
}
private Mesh CreateMesh(IntPtr frame)
{
Mesh mesh = new Mesh();
//Vertices***
int vertexCount = DecoderAPI.frame_get_vertex_count(frame);
byte[] xyzBytes = new byte[vertexCount * 3 * 4];
IntPtr xyz = DecoderAPI.frame_get_vertex_xyz(frame);
Vector3[] vertices = new Vector3[vertexCount];
GCHandle handle = GCHandle.Alloc(vertices, GCHandleType.Pinned);
IntPtr pointer = handle.AddrOfPinnedObject();
Marshal.Copy(xyz, xyzBytes, 0, xyzBytes.Length);
Marshal.Copy(xyzBytes, 0, pointer, xyzBytes.Length);
handle.Free();
mesh.vertices = vertices;
//***
//Faces***
int faceCount = DecoderAPI.frame_face_count(frame);
int trisArrSize = faceCount * 3;
int[] tris = new int[trisArrSize];
IntPtr indices = DecoderAPI.frame_face_indices(frame);
Marshal.Copy(indices, tris, 0, trisArrSize);
mesh.triangles = tris;
//***
mesh.RecalculateNormals();
//UV***
int uvCount = DecoderAPI.frame_get_uv_count(frame);
IntPtr uvData = DecoderAPI.frame_get_uv_data(frame);
int uvArrSize = uvCount * 2;
float[] uvArr = new float[uvArrSize];
Vector2[] uv = new Vector2[uvCount];
Marshal.Copy(uvData, uvArr, 0, uvArrSize);
for (int i = 0; i < uvCount; i++)
{
Vector2 result = new Vector2(uvArr[i * 2], uvArr[i * 2 + 1]) * new Vector2(1, -1);
uv[i] = result;
}
mesh.uv = uv;
//***
if (vertexCount != uvCount)
{
long frameId = DecoderAPI.get_sequential_number(frame);
DebugMethod("UNITY CrteateMesh", $"HERE : in frame id :: {frameId}, vertexCount : {vertexCount}, uvCount : {uvCount}");
}
return mesh;
}
private Texture2D CreateTexture(IntPtr frame)
{
IntPtr textureObj = DecoderAPI.frame_get_texture_obj(frame);
DecoderAPI.TextureInfo textureInfo = DecoderAPI.texture_get_info(textureObj);
int width = textureInfo.width;
int height = textureInfo.height;
int channels = textureInfo.channels;
int stride = textureInfo.stride;
//DecoderAPI.ColorType colorType = textureInfo.color_type;
IntPtr pixels = textureInfo.pixels;
Texture2D texture = new Texture2D(width, height, TextureFormat.RGB24, false);
//Texture2D texture = new Texture2D(width, height, TextureFormat.DXT5, false);
texture.LoadRawTextureData(pixels, width * channels * height);
texture.Apply();
return texture;
}
So, what I do is: I create a mesh and texture for each frame, use them, and then expect Unity to release them from memory after use, but no. OK, I found that this Destroy(releaseFormer) method should help, but it's the same; I see in Task Manager that memory grows endlessly...
For a test I tried this: I start my C++ code, generate (let's say) 100 frames, then stop it (so my C++ doesn't allocate anything anymore), and I still see that memory keeps growing. What I expect is: OK, even if Unity doesn't release data that I no longer need, I loaded 100 frames and that is it; why does memory continue to grow?
The question is: how do I release from memory all the frames that I no longer need?
EDIT
I have changed this method and added Destroy calls in the proper order:
private bool UpdateFrame(int frameIdx)
{
bool result = true;
int readyBuffSize = -1;
if (m_stream != IntPtr.Zero)
{
readyBuffSize = DecoderAPI.stream_get_ready_buffer_size(m_stream);
}
if (m_currMeshFrameIndex != frameIdx
&& readyBuffSize > 0)
{
m_currMeshFrameIndex = frameIdx;
IntPtr frame = DecoderAPI.stream_get_next_frame_obj(m_stream);
if (frame == IntPtr.Zero)
{
result = false;
}
else
{
long sequentialFrameIdx = DecoderAPI.frame_get_sequential_number(frame);
DebugMethod("UNITY UpdateFrame", $"readyBuffSize :: {readyBuffSize}");
DebugMethod("UNITY UpdateFrame", $"sequentialFrameIdx :: {sequentialFrameIdx}");
if (m_meshFilter.mesh != null)
{
Destroy(m_meshFilter.mesh);
}
m_meshFilter.mesh = CreateMesh(frame);
if (m_texture != null)
{
Destroy(m_texture);
}
m_texture = CreateTexture(frame);
m_meshRenderer.material.SetTexture("_MainTex", m_texture);
if (m_stream != IntPtr.Zero)
{
DecoderAPI.stream_release_frame_obj(m_stream, frame);
}
}
}
return result;
}
releaseFormer is the mesh, right? Did you try calling Destroy on the texture object itself?
Another thread suggested Resources.UnloadUnusedAssets().
Personally I'd try to do this with a RenderTexture, especially if the texture size doesn't change too often, though that might not be possible for your use case.
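Building on the RenderTexture idea, another common pattern is to allocate the Mesh and the Texture2D once and refill them every frame, so nothing new is created at 30 fps. A minimal sketch, assuming the texture dimensions and format stay constant between frames; ApplyFrame and the m_reusable* fields are illustrative names, while m_meshFilter and m_meshRenderer are the fields from the question:

// Fragment of the same MonoBehaviour: one Mesh and one Texture2D are reused
// across frames; Mesh.Clear() drops the old geometry but keeps the native object.
private Mesh m_reusableMesh;
private Texture2D m_reusableTexture;

private void ApplyFrame(Vector3[] vertices, int[] tris, Vector2[] uv,
                        IntPtr pixels, int width, int height, int channels)
{
    if (m_reusableMesh == null) m_reusableMesh = new Mesh();
    m_reusableMesh.Clear();
    m_reusableMesh.vertices = vertices;
    m_reusableMesh.triangles = tris;
    m_reusableMesh.uv = uv;
    m_reusableMesh.RecalculateNormals();
    m_meshFilter.mesh = m_reusableMesh;

    if (m_reusableTexture == null)
        m_reusableTexture = new Texture2D(width, height, TextureFormat.RGB24, false);
    m_reusableTexture.LoadRawTextureData(pixels, width * height * channels);
    m_reusableTexture.Apply();
    m_meshRenderer.material.SetTexture("_MainTex", m_reusableTexture);
}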

how to apply texture on 3d model, dynamically?

I have a file that contains arrays of vertices, indices, UVs, textures and so on; in short, everything needed to draw a model with a texture on it. For example, it should be a 3D cube with a texture like wood.
So, what I have for now is: I can present the vertices of the cube (I see my model), but I don't know how to apply a texture to it.
Here is my code:
public void Start()
{
m_stream = DecoderAPI.create_stream_decoder_obj();
string pathToFile = "path_to_my_file";
bool isInitialized = DecoderAPI.stream_init_model(m_stream, pathToFile);
if (isInitialized)
{
m_curFrame = DecoderAPI.stream_get_frame_obj(m_stream, 1);
MeshRenderer meshRenderer = gameObject.AddComponent<MeshRenderer>();
meshRenderer.sharedMaterial = new Material(Shader.Find("Standard"));
Mesh mesh = new Mesh();
//Vertices***
int vertexCount = DecoderAPI.frame_get_vertex_count(m_curFrame);
int xyzArrSize = vertexCount * 3;
float[] xyzArray = new float[xyzArrSize];
IntPtr xyz = DecoderAPI.frame_get_vertex_xyz(m_curFrame);
Marshal.Copy(xyz, xyzArray, 0, xyzArrSize);
Vector3[] vertices = new Vector3[vertexCount];
for (int i = 0; i < vertexCount; i++)
{
vertices[i] = new Vector3(xyzArray[i * 3], xyzArray[i * 3 + 1], xyzArray[i * 3 + 2]);
}
mesh.vertices = vertices;
//***
//Faces***
int faceCount = DecoderAPI.frame_face_count(m_curFrame);
int trisArrSize = faceCount * 3;
int[] tris = new int[trisArrSize];
IntPtr indices = DecoderAPI.frame_face_indices(m_curFrame);
Marshal.Copy(indices, tris, 0, trisArrSize);
mesh.triangles = tris;
//***
mesh.RecalculateNormals();
MeshFilter meshFilter = gameObject.AddComponent<MeshFilter>();
meshFilter.mesh = mesh;
//TEXTURE ****
int uvCount = DecoderAPI.frame_get_uv_count(m_curFrame);
IntPtr uvData = DecoderAPI.frame_get_uv_data(m_curFrame);
IntPtr textureObj = DecoderAPI.frame_get_texture_obj(m_curFrame);
DecoderAPI.TextureInfo textureInfo = DecoderAPI.texture_get_info(textureObj);
int width = textureInfo.width;
int height = textureInfo.height;
int channels = textureInfo.channels;
int stride = textureInfo.stride;
DecoderAPI.ColorType color_type = textureInfo.color_type;
IntPtr pixels = textureInfo.pixels;
HOW TO APPLY THIS TEXTURE DATA TO MY MODEL????
//***
DecoderAPI.frame_release(m_curFrame);
}
}
I found this answer - https://answers.unity.com/questions/390878/how-do-i-apply-a-texture-to-a-3d-model.html -
but I need to know how to apply it dynamically.
Any suggestions? Or maybe some links to tutorials?
EDIT
public void Start()
{
m_stream = DecoderAPI.create_stream_decoder_obj();
string pathToFile = "my_path_to_file";
bool isInitialized = DecoderAPI.stream_init_model(m_stream, pathToFile);
if (isInitialized)
{
m_curFrame = DecoderAPI.stream_get_frame_obj(m_stream, 1);
MeshRenderer meshRenderer = gameObject.AddComponent<MeshRenderer>();
meshRenderer.sharedMaterial = new Material(Shader.Find("Standard"));
Mesh mesh = new Mesh();
//Vertices***
int vertexCount = DecoderAPI.frame_get_vertex_count(m_curFrame);
int xyzArrSize = vertexCount * 3;
float[] xyzArray = new float[xyzArrSize];
IntPtr xyz = DecoderAPI.frame_get_vertex_xyz(m_curFrame);
Marshal.Copy(xyz, xyzArray, 0, xyzArrSize);
Vector3[] vertices = new Vector3[vertexCount];
for (int i = 0; i < vertexCount; i++)
{
vertices[i] = new Vector3(xyzArray[i * 3], xyzArray[i * 3 + 1], xyzArray[i * 3 + 2]);
}
mesh.vertices = vertices;
//***
//Faces***
int faceCount = DecoderAPI.frame_face_count(m_curFrame);
int trisArrSize = faceCount * 3;
int[] tris = new int[trisArrSize];
IntPtr indices = DecoderAPI.frame_face_indices(m_curFrame);
Marshal.Copy(indices, tris, 0, trisArrSize);
mesh.triangles = tris;
//***
mesh.RecalculateNormals();
//UV***
int uvCount = DecoderAPI.frame_get_uv_count(m_curFrame);
IntPtr uvData = DecoderAPI.frame_get_uv_data(m_curFrame);
int uvArrSize = uvCount * 2;
float[] uvArr = new float[uvArrSize];
Vector2[] uv = new Vector2[uvCount];
Marshal.Copy(uvData, uvArr, 0, uvArrSize);
for(int i = 0; i < uvCount; i++)
{
uv[i] = new Vector2(uvArr[i * 2], uvArr[i * 2 + 1]);
}
mesh.uv = uv;
//***
MeshFilter meshFilter = gameObject.AddComponent<MeshFilter>();
meshFilter.mesh = mesh;
//TEXTURE ****
IntPtr textureObj = DecoderAPI.frame_get_texture_obj(m_curFrame);
DecoderAPI.TextureInfo textureInfo = DecoderAPI.texture_get_info(textureObj);
int width = textureInfo.width;
int height = textureInfo.height;
int channels = textureInfo.channels;
int stride = textureInfo.stride;
DecoderAPI.ColorType color_type = textureInfo.color_type;
IntPtr pixels = textureInfo.pixels;
Texture2D texture = new Texture2D(width, height);
texture.LoadRawTextureData(pixels, width * channels * height);
texture.Apply();
meshRenderer.material.SetTexture("_MainText", texture);
//***
DecoderAPI.frame_release(m_curFrame);
}
}
But now I am getting this error:
UnityException: LoadRawTextureData: not enough data provided (will result in overread).
UnityEngine.Texture2D.LoadRawTextureData (System.IntPtr data, System.Int32 size) (at <a9810827dce3444a8e5c4e9f3f5e0828>:0)
Model.Start () (at Assets/Scripts/Model.cs:98)
What am I doing wrong?
First off, your code to construct the cube is missing the UV part, so even if you assign the texture to the material the result is undetermined. Look at the code samples on the Mesh manual page about adding the UVs as well: https://docs.unity3d.com/ScriptReference/Mesh.html
Once you have the UVs, all you have to do is set the texture using SetTexture (see https://docs.unity3d.com/ScriptReference/Material.SetTexture.html).
On a separate note: in your code you are using sharedMaterial instead of material; that is not advisable unless you have many objects all using the same material and you want to change them all.
EDIT:
To get a texture from a pixel buffer, you create a Texture2D object of the given size and colour type, then you apply the data like this:
myTexture.LoadRawTextureData(myPixels);
myTexture.Apply();
~Pino
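Regarding the overread error in the edit above: new Texture2D(width, height) defaults to a 4-channel format, but the decoder delivers width * height * channels bytes, so the format has to be chosen to match channels. A sketch, using width, height, channels and pixels as defined in the question:

// Pick a TextureFormat that matches the decoder's channel count so that
// LoadRawTextureData receives exactly the number of bytes it expects.
TextureFormat format = (channels == 3) ? TextureFormat.RGB24 : TextureFormat.RGBA32;
Texture2D texture = new Texture2D(width, height, format, false);
texture.LoadRawTextureData(pixels, width * height * channels);
texture.Apply();
meshRenderer.material.SetTexture("_MainTex", texture); // "_MainTex", not "_MainText"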
If you have a texture and a MeshRenderer, it works like this:
void SetYourTexture()
{
MeshRenderer yourMeshRenderer = GetComponent<MeshRenderer>();
//If you only need one texture on the material, Unity understands "_MainTex" as the main texture;
yourMeshRenderer.material.SetTexture("_MainTex", yourTexture);
}

How to use " 9-sliced" image type in non-GUI elements?

In my game, I am using simple textures on quads. I can see that Unity GUI allows us to slice an image by setting the "image type" option to Sliced.
I would like to do the same for the other textures I use on quads.
In simple words, I don't want the edges of my textures to be scaled when the texture itself is scaled.
Thanks.
Looks like you have to generate the mesh yourself; it's not as hard as it sounds. The mesh has to look like this:
Each textured mesh is defined by vertices (the red dots in the picture) and (in this case) each vertex has two parameters:
A Vector3 with the position. Only x and y are used here, as a quad is flat (so z = 0).
A Vector2 with the UV, that is, how the material is mapped onto the mesh. These are the texture coordinates: u and v.
To configure this mesh, I added these parameters:
b - for border - how big the border of the GameObject is (in 3D units)
w, h - the width and height of the GameObject (in 3D units)
m - how big the margin in the image is (as a float from 0 to 0.5)
How to do this in Unity3d:
Create a GameObject, add a MeshRenderer with the material you want and an empty MeshFilter.
Add a script that creates the mesh. Look at the Unity3d docs for Mesh, first example.
Note that in Unity3d you have to build this with triangles, not quads (so for each quad make two triangles).
Chanibal's solution worked like a charm for me, but since I wasn't an expert in the matter, it wasn't trivial for me to implement. I'll leave this code here in case somebody finds it useful.
using UnityEngine;
public class SlicedMesh : MonoBehaviour
{
private float _b = 0.1f;
public float Border
{
get
{
return _b;
}
set
{
_b = value;
CreateSlicedMesh();
}
}
private float _w = 1.0f;
public float Width
{
get
{
return _w;
}
set
{
_w = value;
CreateSlicedMesh();
}
}
private float _h = 1.0f;
public float Height
{
get
{
return _h;
}
set
{
_h = value;
CreateSlicedMesh();
}
}
private float _m = 0.4f;
public float Margin
{
get
{
return _m;
}
set
{
_m = value;
CreateSlicedMesh();
}
}
void Start()
{
CreateSlicedMesh();
}
void CreateSlicedMesh()
{
Mesh mesh = new Mesh();
GetComponent<MeshFilter>().mesh = mesh;
mesh.vertices = new Vector3[] {
new Vector3(0, 0, 0), new Vector3(_b, 0, 0), new Vector3(_w-_b, 0, 0), new Vector3(_w, 0, 0),
new Vector3(0, _b, 0), new Vector3(_b, _b, 0), new Vector3(_w-_b, _b, 0), new Vector3(_w, _b, 0),
new Vector3(0, _h-_b, 0), new Vector3(_b, _h-_b, 0), new Vector3(_w-_b, _h-_b, 0), new Vector3(_w, _h-_b, 0),
new Vector3(0, _h, 0), new Vector3(_b, _h, 0), new Vector3(_w-_b, _h, 0), new Vector3(_w, _h, 0)
};
mesh.uv = new Vector2[] {
new Vector2(0, 0), new Vector2(_m, 0), new Vector2(1-_m, 0), new Vector2(1, 0),
new Vector2(0, _m), new Vector2(_m, _m), new Vector2(1-_m, _m), new Vector2(1, _m),
new Vector2(0, 1-_m), new Vector2(_m, 1-_m), new Vector2(1-_m, 1-_m), new Vector2(1, 1-_m),
new Vector2(0, 1), new Vector2(_m, 1), new Vector2(1-_m, 1), new Vector2(1, 1)
};
mesh.triangles = new int[] {
0, 4, 5,
0, 5, 1,
1, 5, 6,
1, 6, 2,
2, 6, 7,
2, 7, 3,
4, 8, 9,
4, 9, 5,
5, 9, 10,
5, 10, 6,
6, 10, 11,
6, 11, 7,
8, 12, 13,
8, 13, 9,
9, 13, 14,
9, 14, 10,
10, 14, 15,
10, 15, 11
};
mesh.RecalculateNormals(); // without normals, lit shaders would shade the quad incorrectly
}
}
Here is another implementation of the same mesh as described above (just add the Tile9Mesh script to an empty GameObject and it should work):
using UnityEngine;
namespace Util {
[ExecuteInEditMode]
[RequireComponent(typeof(MeshRenderer), typeof(MeshFilter))]
public class Tile9Mesh : MonoBehaviour {
public float width = 1;
public float height = 1;
[Range(0, 0.5f)]
public float uvLeft = 0.2f;
[Range(0, 0.5f)]
public float uvRight = 0.2f;
[Range(0, 0.5f)]
public float uvTop = 0.2f;
[Range(0, 0.5f)]
public float uvBottom = 0.2f;
public float uvToWorldScaleX = 1;
public float uvToWorldScaleY = 1;
private Mesh mesh;
private Vector3[] vertices;
private Vector2[] uv;
private void Start() {
vertices = new Vector3[16];
uv = new Vector2[16];
mesh = new Mesh {
name = "Tile9Mesh"
};
FillGeometry();
FillMesh();
mesh.triangles = new[] {
0, 1, 12, 0, 12, 11,
1, 2, 13, 1, 13, 12,
2, 3, 4, 2, 4, 13,
13, 4, 5, 13, 5, 14,
14, 5, 6, 14, 6, 7,
15, 14, 7, 15, 7, 8,
10, 15, 8, 10, 8, 9,
11, 12, 15, 11, 15, 10,
12, 13, 14, 12, 14, 15
};
RecalculateMesh();
gameObject.GetComponent<MeshFilter>().mesh = mesh;
}
public void UpdateMesh() {
if (mesh != null) {
FillGeometry();
FillMesh();
mesh.RecalculateBounds();
mesh.RecalculateNormals();
}
}
private void FillGeometry() {
{
float w = width;
float h = height;
float l = uvLeft * uvToWorldScaleX;
float r = width - uvRight * uvToWorldScaleX;
float t = height - uvTop * uvToWorldScaleY;
float b = uvBottom * uvToWorldScaleY;
vertices[0] = new Vector3(0, 0, 0);
vertices[1] = new Vector3(0, b, 0);
vertices[2] = new Vector3(0, t, 0);
vertices[3] = new Vector3(0, h, 0);
vertices[4] = new Vector3(l, h, 0);
vertices[5] = new Vector3(r, h, 0);
vertices[6] = new Vector3(w, h, 0);
vertices[7] = new Vector3(w, t, 0);
vertices[8] = new Vector3(w, b, 0);
vertices[9] = new Vector3(w, 0, 0);
vertices[10] = new Vector3(r, 0, 0);
vertices[11] = new Vector3(l, 0, 0);
vertices[12] = new Vector3(l, b, 0);
vertices[13] = new Vector3(l, t, 0);
vertices[14] = new Vector3(r, t, 0);
vertices[15] = new Vector3(r, b, 0);
}
{
const float w = 1;
const float h = 1;
float l = uvLeft;
float r = 1 - uvRight;
float t = 1 - uvTop;
float b = uvBottom;
uv[0] = new Vector2(0, 0);
uv[1] = new Vector2(0, b);
uv[2] = new Vector2(0, t);
uv[3] = new Vector2(0, h);
uv[4] = new Vector2(l, h);
uv[5] = new Vector2(r, h);
uv[6] = new Vector2(w, h);
uv[7] = new Vector2(w, t);
uv[8] = new Vector2(w, b);
uv[9] = new Vector2(w, 0);
uv[10] = new Vector2(r, 0);
uv[11] = new Vector2(l, 0);
uv[12] = new Vector2(l, b);
uv[13] = new Vector2(l, t);
uv[14] = new Vector2(r, t);
uv[15] = new Vector2(r, b);
}
}
private void FillMesh() {
mesh.vertices = vertices;
mesh.uv = uv;
}
private void RecalculateMesh() {
mesh.RecalculateBounds();
mesh.RecalculateNormals();
}
#if UNITY_EDITOR
private void OnValidate() {
if (mesh != null) {
UpdateMesh();
}
}
#endif
}
}
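A hypothetical usage sketch for the SlicedMesh component above; myMaterial stands in for your own textured material, and the component order matters because the property setters rebuild the mesh through the attached MeshFilter:

// Create a 9-sliced quad at runtime.
GameObject panel = new GameObject("Panel9Slice");
panel.AddComponent<MeshRenderer>().material = myMaterial; // placeholder material
panel.AddComponent<MeshFilter>();                         // must exist before SlicedMesh
SlicedMesh sliced = panel.AddComponent<SlicedMesh>();
sliced.Width = 4f;     // stretched size in world units
sliced.Height = 2f;
sliced.Border = 0.25f; // border thickness in world units, stays unscaled
sliced.Margin = 0.4f;  // border thickness in UV space (0 to 0.5)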