Unity, How To SetPixels32 Properly?

I'm receiving the error "SetPixels32 called with invalid number of pixels in the array UnityEngine.Texture2D:SetPixels32(Color32[])" on a line of code that is trying to retrieve a pixel array.
In some instances I receive the error, but in others the webcam streams just fine. I'm not sure why this is occurring. This is the line of code that is giving me problems:
streamTexture.SetPixels32(webcamTexture.GetPixels32(pixels));
That isn't much to go on, so the full script is below. Can anyone tell me why this error is occurring, since the streaming texture is set to the dimensions of the webcam texture? Any help is much appreciated!
using System.Collections;
using UnityEngine;
using Photon.Pun;
using UnityEngine.UI;

public class WebcamStream : MonoBehaviourPun, IPunObservable
{
    [SerializeField] private LocalPlayerSettings playerSettings;
    [SerializeField] private AvatarHandler parent;
    [SerializeField] private RawImage streamRawimage;

    private WebCamTexture webcamTexture;
    private Texture2D streamTexture;
    private Color32[] pixels;
    private byte[] data;

    private void OnEnable()
    {
        if (!parent.photonView.IsMine)
            return;
        InitWebcam();
    }

    private void OnDisable()
    {
        if (!parent.photonView.IsMine)
            return;
        webcamTexture.Stop();
    }

    private void InitWebcam()
    {
        //cast dimensions of target UI as ints for new webcam texture
        int width = (int)streamRawimage.rectTransform.rect.width;
        int height = (int)streamRawimage.rectTransform.rect.height;

        //set new dimensions and target device
        webcamTexture = new WebCamTexture(width, height)
        {
            deviceName = playerSettings.Webcam
        };

        //display webcam texture on the raw image and start camera
        streamRawimage.material.mainTexture = webcamTexture;
        webcamTexture.Play();

        //set pixels and stream texture to match webcamTexture
        pixels = new Color32[webcamTexture.width * webcamTexture.height];
        streamTexture = new Texture2D(webcamTexture.width, webcamTexture.height, TextureFormat.RGB24, false);

        //Begin Streaming Webcam Data
        StartCoroutine(StreamWebcam());
    }

    private IEnumerator StreamWebcam()
    {
        while (webcamTexture.deviceName == playerSettings.Webcam)
        {
            if (webcamTexture.isPlaying)
            {
                //set the target texture pixels to the webcam texture pixels and apply get/set
                streamTexture.SetPixels32(webcamTexture.GetPixels32(pixels));
                streamTexture.Apply();

                //convert image to byte array
                data = streamTexture.EncodeToJPG();
            }
            yield return null;
        }

        webcamTexture.Stop();
        if (WebCamTexture.devices.Length > 0)
        {
            InitWebcam();
        }
    }

    public void OnPhotonSerializeView(PhotonStream stream, PhotonMessageInfo info)
    {
        if (stream.IsWriting)
        {
            //send the byte array through the stream
            stream.SendNext(data);
        }
        else
        {
            //convert object received into byte array via cast
            data = (byte[])stream.ReceiveNext();

            //create new texture to load received data into
            streamTexture = new Texture2D(1, 1, TextureFormat.RGB24, false);
            streamTexture.LoadImage(data);

            //set webcam raw image texture to the newly updated texture
            streamRawimage.texture = streamTexture;
        }
    }
}

Solved! There was another script responsible for enabling this script's game object, and it needed to yield for about 1 second before doing so.
Also, a quick edit of streamRawimage.material.mainTexture = webcamTexture; to streamRawimage.texture = webcamTexture; fixed another issue surrounding a missing texture.
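For reference, here is a minimal sketch (not from the original thread; the method name is illustrative) of a more robust version of that fix: instead of relying on a fixed one-second delay in another script, wait until the WebCamTexture reports its real dimensions before sizing the pixel buffer. A WebCamTexture commonly reports a small placeholder size (such as 16x16) until its first frame arrives, and allocating the array from that placeholder is exactly the kind of mismatch that makes SetPixels32 complain about an invalid number of pixels.
private IEnumerator InitWebcamWhenReady()
{
    //create and start the webcam texture for the chosen device, as in InitWebcam above
    webcamTexture = new WebCamTexture { deviceName = playerSettings.Webcam };
    streamRawimage.texture = webcamTexture;
    webcamTexture.Play();

    //wait for the first real frame before trusting the reported dimensions
    while (webcamTexture.width <= 16)
        yield return null;

    //now the pixel buffer and the stream texture match the camera's actual resolution
    pixels = new Color32[webcamTexture.width * webcamTexture.height];
    streamTexture = new Texture2D(webcamTexture.width, webcamTexture.height, TextureFormat.RGB24, false);
    StartCoroutine(StreamWebcam());
}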

Related

How to live display webcam of players in Unity? (I tried with Photon)

I am trying to make a multiplayer game where each player has their webcam displayed on the face of their avatar. I am getting the null reference exception: object reference not set to an instance of an object. The following code is attached to a quad in which the webcam output that was converted to JPEG should be displayed. Any help would be appreciated.
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using Photon.Pun;

public class photonwebcam : MonoBehaviour
{
    WebCamTexture webcamTexture;
    PhotonView photonView;

    void Start()
    {
        PhotonView photonView = PhotonView.Get(this);
        if (!webcamTexture.isPlaying)
        {
            webcamTexture = new WebCamTexture();
            Renderer renderer = GetComponent<Renderer>();
            renderer.material.mainTexture = webcamTexture;
            webcamTexture.Play();
        }
    }

    void FixedUpdate()
    {
        Texture2D tex = new Texture2D(webcamTexture.width, webcamTexture.height, TextureFormat.RGB24, false);
        tex.SetPixels(webcamTexture.GetPixels());
        tex.Apply();
        byte[] bytes = tex.EncodeToJPG();
        photonView.RPC("VideoToBytes", RpcTarget.AllBuffered, bytes);
        Resources.UnloadUnusedAssets();
    }

    [PunRPC]
    public void VideoToBytes(byte[] bytes)
    {
        if (bytes != null)
        {
            Texture2D texa = new Texture2D(2, 2);
            // Load data into the texture.
            texa.LoadImage(bytes);
            // Assign texture to renderer's material.
            GetComponent<Renderer>().material.mainTexture = texa;
        }
    }
}
One problem that will give you a NullReferenceException is that in Start you declare a local variable photonView instead of assigning the field.
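As a minimal sketch of that fix (not part of the original answer), assign the field instead; the isPlaying check has to go as well, since webcamTexture is itself still null at that point:
void Start()
{
    //assign the class field; "PhotonView photonView = ..." only declares a
    //local variable that shadows it, leaving the field null when FixedUpdate runs
    photonView = PhotonView.Get(this);

    //the original "if (!webcamTexture.isPlaying)" guard would also throw here,
    //because webcamTexture has not been created yet
    webcamTexture = new WebCamTexture();
    GetComponent<Renderer>().material.mainTexture = webcamTexture;
    webcamTexture.Play();
}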
The problem with this approach is that RPC is not optimized for large data, so the image will be laggy.
I think you can use Agora instead of RPC to handle webcam sharing.

Cannot get pixels when webcam is not running

I get the following error when I attach the coroutine TakePhoto to a button. Can someone help?
Error:
Cannot get pixels when webcam is not running
UnityEngine.WebCamTexture:GetPixels ()
GetCam/<TakePhoto>d__6:MoveNext () (at Assets/GetCam.cs:52)
UnityEngine.GUIUtility:ProcessEvent (int,intptr,bool&)
Code I took it from:
using UnityEngine;
using System.Collections;
using System.IO;
using UnityEngine.UI;

public class GetCam : MonoBehaviour
{
    WebCamTexture webCam;
    string your_path = "C:\\Users\\Jay\\Desktop";
    public RawImage display;
    public AspectRatioFitter fit;

    public void Start()
    {
        webCam = new WebCamTexture();
        webCam.Play();
        StartCoroutine(TakePhoto());
        display.texture = webCam;
    }

    public void Update()
    {
        float ratio = (float)webCam.width / (float)webCam.height;
        fit.aspectRatio = ratio;
        float ScaleY = webCam.videoVerticallyMirrored ? -1f : 1f;
        display.rectTransform.localScale = new Vector3(1f, ScaleY, 1f);
        int orient = -webCam.videoRotationAngle;
        display.rectTransform.localEulerAngles = new Vector3(0, 0, orient);
    }

    IEnumerator TakePhoto() // Start this Coroutine on some button click
    {
        // NOTE - you almost certainly have to do this here:
        yield return new WaitForEndOfFrame();
        // it's a rare case where the Unity doco is pretty clear,
        // http://docs.unity3d.com/ScriptReference/WaitForEndOfFrame.html
        // be sure to scroll down to the SECOND long example on that doco page

        Texture2D photo = new Texture2D(webCam.width, webCam.height);
        photo.SetPixels(webCam.GetPixels());
        photo.Apply();

        //Encode to a PNG
        byte[] bytes = photo.EncodeToPNG();

        //Write out the PNG. Of course you have to substitute your_path for something sensible
        //(Path.Combine avoids the missing separator in the original your_path + "photo.png")
        File.WriteAllBytes(Path.Combine(your_path, "photo.png"), bytes);
    }
}
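No answer is recorded here, but the error message itself points at the timing: GetPixels throws if the WebCamTexture is not running yet (or has stopped). A minimal sketch of a button handler that guards against this, using the same webCam field as above (the handler name is hypothetical):
public void OnTakePhotoClicked() //wire this to the Button's OnClick in the Inspector
{
    //Button.onClick cannot invoke an IEnumerator directly, so start the coroutine
    //here, and only once the camera is actually running and delivering frames
    if (webCam != null && webCam.isPlaying && webCam.width > 16)
        StartCoroutine(TakePhoto());
    else
        Debug.LogWarning("Webcam is not running yet; ignoring the click.");
}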

Unity code works in the editor but does not work on Build (EXE File) C#

My code loads external images and builds a SkyBox from them for unity3d.
All the SkyBox files are at the right paths (copied manually).
The code loads 6 external images and then builds a SkyBox, and I think there's a problem in the loading when it's a build (but I'm not sure).
Or maybe Unity prevents me from doing it?
And I get no error or warning. It really drives me crazy!!!
using System.IO;
using UnityEngine;

public class ChangeSkyBox : MonoBehaviour
{
    public static Texture2D LoadPNG(string filePath)
    {
        Texture2D tex = null;
        byte[] fileData;
        if (File.Exists(filePath))
        {
            fileData = File.ReadAllBytes(filePath);
            tex = new Texture2D(1024, 1024);
            tex.LoadImage(fileData);
        }
        tex.wrapMode = TextureWrapMode.Clamp;
        return tex;
    }

    public static Material CreateSkyboxMaterial(SkyboxManifest manifest)
    {
        Material result = new Material(Shader.Find("RenderFX/Skybox"));
        result.SetTexture("_FrontTex", manifest.textures[0]);
        result.SetTexture("_BackTex", manifest.textures[1]);
        result.SetTexture("_LeftTex", manifest.textures[2]);
        result.SetTexture("_RightTex", manifest.textures[3]);
        result.SetTexture("_UpTex", manifest.textures[4]);
        result.SetTexture("_DownTex", manifest.textures[5]);
        return result;
    }

    private Texture2D[] textures;

    private void Start()
    {
        Texture2D xt1 = LoadPNG(Directory.GetCurrentDirectory() + "\\Assets\\SkyBox\\Front.png");
        Texture2D xt2 = LoadPNG(Directory.GetCurrentDirectory() + "\\Assets\\SkyBox\\Back.png");
        Texture2D xt3 = LoadPNG(Directory.GetCurrentDirectory() + "\\Assets\\SkyBox\\Left.png");
        Texture2D xt4 = LoadPNG(Directory.GetCurrentDirectory() + "\\Assets\\SkyBox\\Right.png");
        Texture2D xt5 = LoadPNG(Directory.GetCurrentDirectory() + "\\Assets\\SkyBox\\Top.png");
        Texture2D xt6 = LoadPNG(Directory.GetCurrentDirectory() + "\\Assets\\SkyBox\\Bottom.png");

        SkyboxManifest manifest = new SkyboxManifest(xt1, xt2, xt3, xt4, xt5, xt6);
        Material newMat = new Material(Shader.Find("RenderFX/Skybox"));
        newMat = CreateSkyboxMaterial(manifest);
        RenderSettings.skybox = newMat;
        DynamicGI.UpdateEnvironment();
    }
}

public struct SkyboxManifest
{
    public Texture2D[] textures;

    public SkyboxManifest(Texture2D front, Texture2D back, Texture2D left, Texture2D right, Texture2D up, Texture2D down)
    {
        textures = new Texture2D[6]
        {
            front,
            back,
            left,
            right,
            up,
            down
        };
    }
}
I believe your problem lies in this line:
Material result = new Material(Shader.Find("RenderFX/Skybox"));
Unity cannot find the shader at runtime. To fix it, make the "base" Material by hand in Unity and attach it to your script through the Inspector.
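A minimal sketch of that suggestion (the field name is illustrative): create the material asset by hand in the editor, expose it as a serialized field, and copy it instead of calling Shader.Find, which returns null in a build when the shader was not included.
public class ChangeSkyBox : MonoBehaviour
{
    //hand-made material using the RenderFX/Skybox shader, assigned in the Inspector
    [SerializeField] private Material skyboxBaseMaterial;

    private Material CreateSkyboxMaterial(SkyboxManifest manifest)
    {
        //copy the Inspector-assigned material instead of relying on Shader.Find at runtime
        Material result = new Material(skyboxBaseMaterial);
        result.SetTexture("_FrontTex", manifest.textures[0]);
        result.SetTexture("_BackTex", manifest.textures[1]);
        result.SetTexture("_LeftTex", manifest.textures[2]);
        result.SetTexture("_RightTex", manifest.textures[3]);
        result.SetTexture("_UpTex", manifest.textures[4]);
        result.SetTexture("_DownTex", manifest.textures[5]);
        return result;
    }
}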

How to access Hololens front camera

I'm working with HoloLens, and I'm trying to get the image from the front camera. The only thing I need is to take the image of each frame of that camera and transform it into a byte array.
Follow the Unity example in the documentation. It has a well-written example:
https://docs.unity3d.com/Manual/windowsholographic-photocapture.html
Copied from the Unity documentation at the link above:
using UnityEngine;
using System.Collections;
using System.Linq;
using UnityEngine.XR.WSA.WebCam;

public class PhotoCaptureExample : MonoBehaviour {
    PhotoCapture photoCaptureObject = null;
    Texture2D targetTexture = null;

    // Use this for initialization
    void Start() {
        Resolution cameraResolution = PhotoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
        targetTexture = new Texture2D(cameraResolution.width, cameraResolution.height);

        // Create a PhotoCapture object
        PhotoCapture.CreateAsync(false, delegate (PhotoCapture captureObject) {
            photoCaptureObject = captureObject;
            CameraParameters cameraParameters = new CameraParameters();
            cameraParameters.hologramOpacity = 0.0f;
            cameraParameters.cameraResolutionWidth = cameraResolution.width;
            cameraParameters.cameraResolutionHeight = cameraResolution.height;
            cameraParameters.pixelFormat = CapturePixelFormat.BGRA32;

            // Activate the camera
            photoCaptureObject.StartPhotoModeAsync(cameraParameters, delegate (PhotoCapture.PhotoCaptureResult result) {
                // Take a picture
                photoCaptureObject.TakePhotoAsync(OnCapturedPhotoToMemory);
            });
        });
    }

    void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame) {
        // Copy the raw image data into the target texture
        photoCaptureFrame.UploadImageDataToTexture(targetTexture);

        // Create a GameObject to which the texture can be applied
        GameObject quad = GameObject.CreatePrimitive(PrimitiveType.Quad);
        Renderer quadRenderer = quad.GetComponent<Renderer>() as Renderer;
        quadRenderer.material = new Material(Shader.Find("Custom/Unlit/UnlitTexture"));
        quad.transform.parent = this.transform;
        quad.transform.localPosition = new Vector3(0.0f, 0.0f, 3.0f);
        quadRenderer.material.SetTexture("_MainTex", targetTexture);

        // Deactivate the camera
        photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
    }

    void OnStoppedPhotoMode(PhotoCapture.PhotoCaptureResult result) {
        // Shutdown the photo capture resource
        photoCaptureObject.Dispose();
        photoCaptureObject = null;
    }
}
To get the bytes, replace the following method:
void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame) {
    // Copy the raw image data into the target texture
    photoCaptureFrame.UploadImageDataToTexture(targetTexture);

    // Create a GameObject to which the texture can be applied
    GameObject quad = GameObject.CreatePrimitive(PrimitiveType.Quad);
    Renderer quadRenderer = quad.GetComponent<Renderer>() as Renderer;
    quadRenderer.material = new Material(Shader.Find("Custom/Unlit/UnlitTexture"));
    quad.transform.parent = this.transform;
    quad.transform.localPosition = new Vector3(0.0f, 0.0f, 3.0f);
    quadRenderer.material.SetTexture("_MainTex", targetTexture);

    // Deactivate the camera
    photoCaptureObject.StopPhotoModeAsync(OnStoppedPhotoMode);
}
with the method below:
void OnCapturedPhotoToMemory(PhotoCapture.PhotoCaptureResult result, PhotoCaptureFrame photoCaptureFrame)
{
    // Requires "using System.Collections.Generic;" for List<byte>.
    List<byte> imageBufferList = new List<byte>();
    imageBufferList.Clear();
    photoCaptureFrame.CopyRawImageDataIntoBuffer(imageBufferList);
    var bytesArray = imageBufferList.ToArray();
}
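If you then want the raw buffer back as a texture, something like the following sketch (not part of the original answer; the method name is illustrative) should work, assuming the BGRA32 pixel format requested in cameraParameters and the same resolution photo mode was started with:
Texture2D FromRawBgra(byte[] bytesArray, int width, int height)
{
    //width/height must match the cameraResolution passed to StartPhotoModeAsync
    Texture2D tex = new Texture2D(width, height, TextureFormat.BGRA32, false);
    tex.LoadRawTextureData(bytesArray);
    tex.Apply();
    return tex;
}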
If you are using Unity 2018.1 or 2018.2 (I think also 2017.4), then it will NOT work. Unity has a public issue tracker entry to resolve it:
https://issuetracker.unity3d.com/issues/windowsmr-failure-to-take-photo-capture-in-hololens
I created a workaround until Unity fixes the bug:
https://github.com/MSAlshair/HoloLensMediaCapture
A basic sample that works around the bug without using Unity's PhotoCapture (more details in the link above):
You must wrap the code in #if WINDOWS_UWP to be able to use MediaCapture. Ideally you would use Unity's PhotoCapture to avoid this, but until Unity resolves the issue, you can use something like this.
#if WINDOWS_UWP
public async System.Threading.Tasks.Task<byte[]> GetPhotoAsync()
{
    //Get available devices info
    var devices = await Windows.Devices.Enumeration.DeviceInformation.FindAllAsync(
        Windows.Devices.Enumeration.DeviceClass.VideoCapture);
    var numberOfDevices = devices.Count;
    byte[] photoBytes = null;

    //Check if the device has a camera
    if (devices.Count > 0)
    {
        Windows.Media.Capture.MediaCapture mediaCapture = new Windows.Media.Capture.MediaCapture();
        await mediaCapture.InitializeAsync();

        //Get highest available resolution
        var highestResolution = mediaCapture.VideoDeviceController.GetAvailableMediaStreamProperties(
            Windows.Media.Capture.MediaStreamType.Photo).
            Select(item => item as Windows.Media.MediaProperties.ImageEncodingProperties).
            Where(item => item != null).
            OrderByDescending(Resolution => Resolution.Height * Resolution.Width).
            ToList().First();

        using (var photoRandomAccessStream = new Windows.Storage.Streams.InMemoryRandomAccessStream())
        {
            await mediaCapture.CapturePhotoToStreamAsync(highestResolution, photoRandomAccessStream);
            //Convert stream to byte array
            photoBytes = await ConvertFromInMemoryRandomAccessStreamToByteArrayAsync(photoRandomAccessStream);
        }
    }
    else
    {
        System.Diagnostics.Debug.WriteLine("No camera device detected!");
    }
    return photoBytes;
}

public static async System.Threading.Tasks.Task<byte[]> ConvertFromInMemoryRandomAccessStreamToByteArrayAsync(
    Windows.Storage.Streams.InMemoryRandomAccessStream inMemoryRandomAccessStream)
{
    using (var dataReader = new Windows.Storage.Streams.DataReader(inMemoryRandomAccessStream.GetInputStreamAt(0)))
    {
        var bytes = new byte[inMemoryRandomAccessStream.Size];
        await dataReader.LoadAsync((uint)inMemoryRandomAccessStream.Size);
        dataReader.ReadBytes(bytes);
        return bytes;
    }
}
#endif
Do NOT forget to add capabilities to the manifest:
WebCam
Microphone: I am not sure if it is needed since we are only taking photos, but I added it anyway.
Pictures Library, if you want to save to it.
In player settings, enable access to the camera, and try again.

Modify WebcamTexture from unity native plugin

I just want to pass a WebcamTexture to a native plugin (using GetNativeTexturePtr() for performance), draw something on it, and render the result onto a plane on the screen.
I guess some Unity thread updates the WebcamTexture contents every frame, so writing to that texture could produce flickering, as two threads would be updating its contents. Because of that, I'm using another texture, "drawTexture", to render the result.
The algorithm is simple. On every render event:
Read camTexture
Copy camTexture contents to drawTexture
Draw things on drawTexture
Render drawTexture (OpenGL bindTexture)
I'm following the NativeRenderingPlugin example, but every time I try to copy contents from camTexture to drawTexture (even a single pixel), the main thread freezes.
I think that may be happening because I'm trying to read camTexture while it is being modified by an external thread.
Here is some code:
C# plugin source
[DllImport("plugin")]
private static extern void setTexture(IntPtr cam, IntPtr draw, int width, int height);

[DllImport("plugin")]
private static extern IntPtr GetRenderEventFunc();

WebCamTexture webcamTexture;
Texture2D drawTexture;
GameObject plane;
bool initialized = false;

IEnumerator Start()
{
    // PlaneTexture
    // Create a texture
    drawTexture = new Texture2D(1280, 720, TextureFormat.BGRA32, true);
    drawTexture.filterMode = FilterMode.Point;
    drawTexture.Apply();

    plane = GameObject.Find("Plane");
    plane.GetComponent<MeshRenderer>().material.mainTexture = drawTexture;

    // Webcam texture
    webcamTexture = new WebCamTexture();
    webcamTexture.filterMode = FilterMode.Point;
    webcamTexture.Play();

    yield return StartCoroutine("CallPluginAtEndOfFrames");
}

private IEnumerator CallPluginAtEndOfFrames()
{
    while (true)
    {
        // Wait until all frame rendering is done
        yield return new WaitForEndOfFrame();

        // wait for webcam and initialize
        if (!initialized && webcamTexture.width > 16)
        {
            setTexture(webcamTexture.GetNativeTexturePtr(), drawTexture.GetNativeTexturePtr(), webcamTexture.width, webcamTexture.height);
            initialized = true;
        }
        else if (initialized)
        {
            // Issue a plugin event with arbitrary integer identifier.
            // The plugin can distinguish between different
            // things it needs to do based on this ID.
            // For our simple plugin, it does not matter which ID we pass here.
            GL.IssuePluginEvent(GetRenderEventFunc(), 1);
        }
    }
}
C++ plugin (draw texture method)
static void ModifyTexturePixels()
{
    void* textureHandle = drawTexturePointer;
    int width = textureWidth;
    int height = textureHeight;
    if (!textureHandle)
        return;

    int textureRowPitch;
    void* newEmptyTexturePointer = (unsigned char*)s_CurrentAPI->BeginModifyTexture(textureHandle, width, height, &textureRowPitch);
    memcpy(newEmptyTexturePointer, webcamTexturePointer, width * height * 4);
    s_CurrentAPI->EndModifyTexture(textureHandle, width, height, textureRowPitch, newEmptyTexturePointer);
}
Any help would be appreciated.