Playing sound in Unity when detecting multiple image targets - unity3d

I detect more than one image target in #unity. When I detect a target, I play a sound that pronounces the image target's name. If I detect a single target, the sound plays loudly, but when I detect multiple targets, only one of the sounds plays loudly and the others play very quietly or not at all.
Please show me a specific way to do this, either as a tutorial or as an explanation in your answer.
using UnityEngine;
using Vuforia; // required for TrackableBehaviour and ITrackableEventHandler
public class SoundPlay: MonoBehaviour,ITrackableEventHandler
{
#region PRIVATE_MEMBER_VARIABLES
private TrackableBehaviour mTrackableBehaviour;
public AudioClip sound;
#endregion // PRIVATE_MEMBER_VARIABLES
#region UNITY_MONOBEHAVIOUR_METHODS
void Start()
{
mTrackableBehaviour = GetComponent<TrackableBehaviour>();
if (mTrackableBehaviour)
{
mTrackableBehaviour.RegisterTrackableEventHandler(this);
}
}
#endregion // UNITY_MONOBEHAVIOUR_METHODS
#region PUBLIC_METHODS
/// <summary>
/// Implementation of the ITrackableEventHandler function called when the
/// tracking state changes.
/// </summary>
public void OnTrackableStateChanged(
TrackableBehaviour.Status previousStatus,
TrackableBehaviour.Status newStatus)
{
if (newStatus == TrackableBehaviour.Status.DETECTED ||
newStatus == TrackableBehaviour.Status.TRACKED)
{
OnTrackingFound();
}
else
{
OnTrackingLost();
}
}
#endregion // PUBLIC_METHODS
#region PRIVATE_METHODS
private void OnTrackingFound()
{
Renderer[] rendererComponents = GetComponentsInChildren<Renderer>(true);
Collider[] colliderComponents = GetComponentsInChildren<Collider>(true);
// Enable rendering:
foreach (Renderer component in rendererComponents)
{
component.enabled = true;
}
// Enable colliders:
foreach (Collider component in colliderComponents)
{
component.enabled = true;
}
Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");
GetComponentInChildren<fiveSound>().Play();
}
private void OnTrackingLost()
{
Renderer[] rendererComponents = GetComponentsInChildren<Renderer>(true);
Collider[] colliderComponents = GetComponentsInChildren<Collider>(true);
// Disable rendering:
foreach (Renderer component in rendererComponents)
{
component.enabled = false;
}
// Disable colliders:
foreach (Collider component in colliderComponents)
{
component.enabled = false;
}
Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");
}
public void Play(){
AudioSource.PlayClipAtPoint(sound, transform.position);
}
#endregion // PRIVATE_METHODS
}

Is 3D Sound checked?
If it is, uncheck it. Let me know how you get on!
^._.^

using UnityEngine;
using Vuforia;
public class SoundPlay: MonoBehaviour,ITrackableEventHandler
{
#region PRIVATE_MEMBER_VARIABLES
private TrackableBehaviour mTrackableBehaviour;
public AudioClip sound;
#endregion // PRIVATE_MEMBER_VARIABLES
#region UNITY_MONOBEHAVIOUR_METHODS
void Start()
{
mTrackableBehaviour = GetComponent<TrackableBehaviour>();
if (mTrackableBehaviour)
{
mTrackableBehaviour.RegisterTrackableEventHandler(this);
}
}
#endregion // UNITY_MONOBEHAVIOUR_METHODS
#region PUBLIC_METHODS
/// <summary>
/// Implementation of the ITrackableEventHandler function called when the
/// tracking state changes.
/// </summary>
public void OnTrackableStateChanged(
TrackableBehaviour.Status previousStatus,
TrackableBehaviour.Status newStatus)
{
if (newStatus == TrackableBehaviour.Status.DETECTED ||
newStatus == TrackableBehaviour.Status.TRACKED)
{
OnTrackingFound();
}
else
{
OnTrackingLost();
}
}
#endregion // PUBLIC_METHODS
#region PRIVATE_METHODS
private void OnTrackingFound()
{
Renderer[] rendererComponents = GetComponentsInChildren<Renderer>(true);
Collider[] colliderComponents = GetComponentsInChildren<Collider>(true);
// Enable rendering:
foreach (Renderer component in rendererComponents)
{
component.enabled = true;
}
// Enable colliders:
foreach (Collider component in colliderComponents)
{
component.enabled = true;
}
Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");
GetComponentInChildren<AudioSource>().Play();
}
private void OnTrackingLost()
{
Renderer[] rendererComponents = GetComponentsInChildren<Renderer>(true);
Collider[] colliderComponents = GetComponentsInChildren<Collider>(true);
// Disable rendering:
foreach (Renderer component in rendererComponents)
{
component.enabled = false;
}
// Disable colliders:
foreach (Collider component in colliderComponents)
{
component.enabled = false;
}
Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");
}
public void Play(){
AudioSource.PlayClipAtPoint(sound, transform.position);
}
#endregion // PRIVATE_METHODS
}
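If you would rather force 2D playback from code than untick 3D Sound in the Inspector, here is a minimal sketch (assuming each target keeps the public AudioClip sound field from the script above). AudioSource.PlayClipAtPoint spawns a spatialized one-shot source at a world position, so clips on targets farther from the AR camera come out much quieter; routing the clip through a non-spatialized AudioSource on the target avoids that:
// Drop-in replacement for the Play() method above.
public void Play()
{
var source = GetComponent<AudioSource>();
if (source == null)
source = gameObject.AddComponent<AudioSource>(); // fallback if no AudioSource was added in the Inspector
source.spatialBlend = 0f; // 0 = 2D (no distance attenuation), 1 = fully 3D
source.PlayOneShot(sound); // plays the clip at full volume regardless of distance from the camera
}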

Related

How can I get a Target name from an image target to play a video player when finding a marker in an augmented reality app in Unity and vuforia

How can I get a target name from an image target to play a video when finding a marker in an augmented reality app? Basically, I have this script and I want to modify it so that when a marker is detected it shows a 3D augmented reality object and, to the side, a video. Here's the script:
/*==============================================================================
Copyright (c) 2021 PTC Inc. All Rights Reserved.
Confidential and Proprietary - Protected under copyright and other laws.
Vuforia is a trademark of PTC Inc., registered in the United States and other
countries.
==============================================================================*/
using System;
using UnityEngine;
using UnityEngine.Events;
using Vuforia;
using UnityEngine.Video;
/// <summary>
/// A custom handler that implements the ITrackableEventHandler interface.
///
/// Changes made to this file could be overwritten when upgrading the Vuforia version.
/// When implementing custom event handler behavior, consider inheriting from this class instead.
/// </summary>
public class ScriptModificado : MonoBehaviour
{
public enum TrackingStatusFilter
{
Tracked,
Tracked_ExtendedTracked,
Tracked_ExtendedTracked_Limited
}
/// <summary>
/// A filter that can be set to either:
/// - Only consider a target if it's in view (TRACKED)
/// - Also consider the target if it's outside of the view, but the environment is tracked (EXTENDED_TRACKED)
/// - Even consider the target if tracking is in LIMITED mode, e.g. the environment is just 3dof tracked.
/// </summary>
public TrackingStatusFilter StatusFilter = TrackingStatusFilter.Tracked_ExtendedTracked_Limited;
public bool UsePoseSmoothing = false;
public AnimationCurve AnimationCurve = AnimationCurve.Linear(0, 0, LERP_DURATION, 1);
public UnityEvent OnTargetFound;
public UnityEvent OnTargetLost;
protected ObserverBehaviour mObserverBehaviour;
protected TargetStatus mPreviousTargetStatus = TargetStatus.NotObserved;
protected bool mCallbackReceivedOnce;
const float LERP_DURATION = 0.3f;
PoseSmoother mPoseSmoother;
// public AudioSource aSource;
// public AudioClip aClip;
public VideoPlayer vPlayer;
public GameObject BotonReproducir;
protected virtual void Start()
{
mObserverBehaviour = GetComponent<ObserverBehaviour>();
if (mObserverBehaviour)
{
mObserverBehaviour.OnTargetStatusChanged += OnObserverStatusChanged;
mObserverBehaviour.OnBehaviourDestroyed += OnObserverDestroyed;
OnObserverStatusChanged(mObserverBehaviour, mObserverBehaviour.TargetStatus);
SetupPoseSmoothing();
}
}
protected virtual void OnDestroy()
{
if (VuforiaBehaviour.Instance != null)
VuforiaBehaviour.Instance.World.OnStateUpdated -= OnStateUpdated;
if (mObserverBehaviour)
OnObserverDestroyed(mObserverBehaviour);
mPoseSmoother?.Dispose();
}
void OnObserverDestroyed(ObserverBehaviour observer)
{
mObserverBehaviour.OnTargetStatusChanged -= OnObserverStatusChanged;
mObserverBehaviour.OnBehaviourDestroyed -= OnObserverDestroyed;
mObserverBehaviour = null;
}
void OnObserverStatusChanged(ObserverBehaviour behaviour, TargetStatus targetStatus)
{
var name = mObserverBehaviour.TargetName;
if (mObserverBehaviour is VuMarkBehaviour vuMarkBehaviour && vuMarkBehaviour.InstanceId != null)
{
name += " (" + vuMarkBehaviour.InstanceId + ")";
}
Debug.Log($"Target status: {name} {targetStatus.Status} -- {targetStatus.StatusInfo}");
HandleTargetStatusChanged(mPreviousTargetStatus.Status, targetStatus.Status);
HandleTargetStatusInfoChanged(targetStatus.StatusInfo);
mPreviousTargetStatus = targetStatus;
}
protected virtual void HandleTargetStatusChanged(Status previousStatus, Status newStatus)
{
var shouldBeRendererBefore = ShouldBeRendered(previousStatus);
var shouldBeRendererNow = ShouldBeRendered(newStatus);
if (shouldBeRendererBefore != shouldBeRendererNow)
{
if (shouldBeRendererNow)
{
OnTrackingFound();
/*if (mObserverBehaviour.TargetName == "nocturno")
{
// aSource.PlayOneShot(aClip);
} */
if (mObserverBehaviour.TargetName == "nocturno")
{
vPlayer.Play();
}
}
else
{
OnTrackingLost();
if (mObserverBehaviour.TargetName == "nocturno")
{
// aSource.Stop();
vPlayer.Stop();
BotonReproducir.SetActive(false);
}
}
}
else
{
if (!mCallbackReceivedOnce && !shouldBeRendererNow)
{
// This is the first time we are receiving this callback, and the target is not visible yet.
// --> Hide the augmentation.
OnTrackingLost();
}
}
mCallbackReceivedOnce = true;
}
protected virtual void HandleTargetStatusInfoChanged(StatusInfo newStatusInfo)
{
if (newStatusInfo == StatusInfo.WRONG_SCALE)
{
Debug.LogErrorFormat("The target {0} appears to be scaled incorrectly. " +
"This might result in tracking issues. " +
"Please make sure that the target size corresponds to the size of the " +
"physical object in meters and regenerate the target or set the correct " +
"size in the target's inspector.", mObserverBehaviour.TargetName);
}
}
protected bool ShouldBeRendered(Status status)
{
if (status == Status.TRACKED)
{
// always render the augmentation when status is TRACKED, regardless of filter
return true;
}
if (StatusFilter == TrackingStatusFilter.Tracked_ExtendedTracked && status == Status.EXTENDED_TRACKED)
{
// also return true if the target is extended tracked
return true;
}
if (StatusFilter == TrackingStatusFilter.Tracked_ExtendedTracked_Limited &&
(status == Status.EXTENDED_TRACKED || status == Status.LIMITED))
{
// in this mode, render the augmentation even if the target's tracking status is LIMITED.
// this is mainly recommended for Anchors.
return true;
}
return false;
}
protected virtual void OnTrackingFound()
{
if (mObserverBehaviour)
{
var rendererComponents = mObserverBehaviour.GetComponentsInChildren<Renderer>(true);
var colliderComponents = mObserverBehaviour.GetComponentsInChildren<Collider>(true);
var canvasComponents = mObserverBehaviour.GetComponentsInChildren<Canvas>(true);
// Enable rendering:
foreach (var component in rendererComponents)
component.enabled = true;
// Enable colliders:
foreach (var component in colliderComponents)
component.enabled = true;
// Enable canvas':
foreach (var component in canvasComponents)
component.enabled = true;
}
OnTargetFound?.Invoke();
}
protected virtual void OnTrackingLost()
{
if (mObserverBehaviour)
{
var rendererComponents = mObserverBehaviour.GetComponentsInChildren<Renderer>(true);
var colliderComponents = mObserverBehaviour.GetComponentsInChildren<Collider>(true);
var canvasComponents = mObserverBehaviour.GetComponentsInChildren<Canvas>(true);
// Disable rendering:
foreach (var component in rendererComponents)
component.enabled = false;
// Disable colliders:
foreach (var component in colliderComponents)
component.enabled = false;
// Disable canvas':
foreach (var component in canvasComponents)
component.enabled = false;
}
OnTargetLost?.Invoke();
}
protected void SetupPoseSmoothing()
{
UsePoseSmoothing &= VuforiaBehaviour.Instance.WorldCenterMode == WorldCenterMode.DEVICE; // pose smoothing only works with the DEVICE world center mode
mPoseSmoother = new PoseSmoother(mObserverBehaviour, AnimationCurve);
VuforiaBehaviour.Instance.World.OnStateUpdated += OnStateUpdated;
}
void OnStateUpdated()
{
if (enabled && UsePoseSmoothing)
mPoseSmoother.Update();
}
class PoseSmoother
{
const float e = 0.001f;
const float MIN_ANGLE = 2f;
PoseLerp mActivePoseLerp;
Pose mPreviousPose;
readonly ObserverBehaviour mTarget;
readonly AnimationCurve mAnimationCurve;
TargetStatus mPreviousStatus;
public PoseSmoother(ObserverBehaviour target, AnimationCurve animationCurve)
{
mTarget = target;
mAnimationCurve = animationCurve;
}
public void Update()
{
var currentPose = new Pose(mTarget.transform.position, mTarget.transform.rotation);
var currentStatus = mTarget.TargetStatus;
UpdatePoseSmoothing(currentPose, currentStatus);
mPreviousPose = currentPose;
mPreviousStatus = currentStatus;
}
void UpdatePoseSmoothing(Pose currentPose, TargetStatus currentTargetStatus)
{
if (mActivePoseLerp == null && ShouldSmooth(currentPose, currentTargetStatus))
{
mActivePoseLerp = new PoseLerp(mPreviousPose, currentPose, mAnimationCurve);
}
if (mActivePoseLerp != null)
{
var pose = mActivePoseLerp.GetSmoothedPosition(Time.deltaTime);
mTarget.transform.SetPositionAndRotation(pose.position, pose.rotation);
if (mActivePoseLerp.Complete)
{
mActivePoseLerp = null;
}
}
}
/// Smooth pose transition if the pose changed and the target is still being reported as "extended tracked" or it has just returned to
/// "tracked" from previously being "extended tracked"
bool ShouldSmooth(Pose currentPose, TargetStatus currentTargetStatus)
{
return (currentTargetStatus.Status == Status.EXTENDED_TRACKED || (currentTargetStatus.Status == Status.TRACKED && mPreviousStatus.Status == Status.EXTENDED_TRACKED)) &&
(Vector3.SqrMagnitude(currentPose.position - mPreviousPose.position) > e || Quaternion.Angle(currentPose.rotation, mPreviousPose.rotation) > MIN_ANGLE);
}
public void Dispose()
{
mActivePoseLerp = null;
}
}
class PoseLerp
{
readonly AnimationCurve mCurve;
readonly Pose mStartPose;
readonly Pose mEndPose;
readonly float mEndTime;
float mElapsedTime;
public bool Complete { get; private set; }
public PoseLerp(Pose startPose, Pose endPose, AnimationCurve curve)
{
mStartPose = startPose;
mEndPose = endPose;
mCurve = curve;
mEndTime = mCurve.keys[mCurve.length - 1].time;
}
public Pose GetSmoothedPosition(float deltaTime)
{
mElapsedTime += deltaTime;
if (mElapsedTime >= mEndTime)
{
mElapsedTime = 0;
Complete = true;
return mEndPose;
}
var ratio = mCurve.Evaluate(mElapsedTime);
var smoothPosition = Vector3.Lerp(mStartPose.position, mEndPose.position, ratio);
var smoothRotation = Quaternion.Slerp(mStartPose.rotation, mEndPose.rotation, ratio);
return new Pose(smoothPosition, smoothRotation);
}
}
}
So how do I get the target name from mObserverBehaviour, change it from "nocturno" to the name of my own image target, and have it play the video? And how can I modify this code so that, when the Vuforia AR camera detects the image target, the video is displayed to the side on a 2D sprite object, roughly like in this video: https://www.youtube.com/watch?v=izJatV5ypvM
Attached image: https://i.stack.imgur.com/Buleh.png
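For the name check itself, the script already compares mObserverBehaviour.TargetName against "nocturno" inside HandleTargetStatusChanged, so you only need to swap "nocturno" for the name of your own image target exactly as it appears in your Vuforia target database. A minimal sketch of that branch (the name "MyTarget" and the sideVideoObject field are hypothetical placeholders, not part of the original script):
if (mObserverBehaviour.TargetName == "MyTarget") // the name shown in your Vuforia database
{
sideVideoObject.SetActive(true); // e.g. a quad or sprite positioned beside the 3D model
vPlayer.Play(); // the VideoPlayer assigned in the Inspector
}
To show the video beside the augmentation, one common setup is to let the VideoPlayer render into a Render Texture and display that texture on the side quad's material; the Render Mode and Target Texture can both be set on the VideoPlayer in the Inspector, so that part needs no extra code.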

How to make play and pause button on Augmented Reality (Vuforia) video playback in Unity?

There is a problem in my project: the video plays automatically. How can I make the video play and pause in Unity? I'm using Unity 2019.3.5f1 and Vuforia. Is there a tutorial on how to make AR video playback that can play and pause?
I tried this code but it doesn't work, and I don't know why:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using Vuforia;
using UnityEngine.Video;
public class PlayControl : MonoBehaviour, ITrackableEventHandler
{
public GameObject videoplayer, Playvideo_button, Pausevideo_button, Stopvideo_button;
protected TrackableBehaviour mTrackableBehaviour;
public void OnTrackableStateChanged(TrackableBehaviour.Status previousStatus, TrackableBehaviour.Status newStatus)
{
if (newStatus == TrackableBehaviour.Status.DETECTED ||
newStatus == TrackableBehaviour.Status.TRACKED ||
newStatus == TrackableBehaviour.Status.EXTENDED_TRACKED)
{
Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");
if (mTrackableBehaviour.TrackableName == "Video_marker")
{
videoplayer.GetComponent<VideoPlayer>().Play();
}
OnTrackingFound();
}
else if (previousStatus == TrackableBehaviour.Status.TRACKED &&
newStatus == TrackableBehaviour.Status.NO_POSE)
{
Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");
videoplayer.GetComponent<VideoPlayer>().Stop();
OnTrackingLost();
}
else
{
// For combo of previousStatus=UNKNOWN + newStatus=UNKNOWN|NOT_FOUND
// Vuforia is starting, but tracking has not been lost or found yet
// Call OnTrackingLost() to hide the augmentations
OnTrackingLost();
}
}
// Start is called before the first frame update
void Start()
{
mTrackableBehaviour = GetComponent<TrackableBehaviour>();
if(mTrackableBehaviour)
{
mTrackableBehaviour.RegisterTrackableEventHandler(this);
}
}
protected virtual void OnTrackingFound()
{
var rendererComponents = GetComponentsInChildren<Renderer>(true);
var colliderComponents = GetComponentsInChildren<Collider>(true);
var canvasComponents = GetComponentsInChildren<Canvas>(true);
// Enable rendering:
foreach (var component in rendererComponents)
component.enabled = true;
// Enable colliders:
foreach (var component in colliderComponents)
component.enabled = true;
// Enable canvas':
foreach (var component in canvasComponents)
component.enabled = true;
}
protected virtual void OnTrackingLost()
{
var rendererComponents = GetComponentsInChildren<Renderer>(true);
var colliderComponents = GetComponentsInChildren<Collider>(true);
var canvasComponents = GetComponentsInChildren<Canvas>(true);
// Disable rendering:
foreach (var component in rendererComponents)
component.enabled = false;
// Disable colliders:
foreach (var component in colliderComponents)
component.enabled = false;
// Disable canvas':
foreach (var component in canvasComponents)
component.enabled = false;
}
// Update is called once per frame
void Update()
{
if (Input.GetMouseButtonDown(0))
{
Ray ray = Camera.main.ScreenPointToRay(Input.mousePosition);
RaycastHit hit;
if (Physics.Raycast(ray, out hit))
{
//case 1
if (hit.collider.tag == "Playvideo")
{
videoplayer.GetComponent<VideoPlayer>().Play();
Playvideo_button.SetActive(false);
Pausevideo_button.SetActive(true);
Stopvideo_button.SetActive(true);
}
//case 2
if (hit.collider.tag == "Stopvideo")
{
videoplayer.GetComponent<VideoPlayer>().Stop();
Playvideo_button.SetActive(true);
Pausevideo_button.SetActive(true);
Stopvideo_button.SetActive(false);
}
//case 3
if (hit.collider.tag == "Pausevideo")
{
videoplayer.GetComponent<VideoPlayer>().Pause();
Playvideo_button.SetActive(true);
Pausevideo_button.SetActive(false);
Stopvideo_button.SetActive(true);
}
}
}
}
}
It's not really clear where this script runs in your project. The best way to make sure your hits land in the right place is to add the script to the buttons, which I see you didn't do. Did you make sure your raycast reaches the right button? You can pass a maxDistance to Physics.Raycast, which may be a great help here. Also, in your "if" statements I would check that there is a collision first and then check which object was hit, something like this:
if (Physics.Raycast(ray, out hit, 900.0f))
{
if (hit.transform != null)
{
if (GameObject.ReferenceEquals(hit.transform.gameObject, playButtonGameObject))
{
videoplayer.GetComponent<VideoPlayer>().Play();
Playvideo_button.SetActive(false);
Pausevideo_button.SetActive(true);
Stopvideo_button.SetActive(true);
}
else if (GameObject.ReferenceEquals(hit.transform.gameObject, stopButtonGameObject))
{
videoplayer.GetComponent<VideoPlayer>().Stop();
Playvideo_button.SetActive(true);
Pausevideo_button.SetActive(true);
Stopvideo_button.SetActive(false);
}
else if (GameObject.ReferenceEquals(hit.transform.gameObject, pauseButtonGameObject))
{
videoplayer.GetComponent<VideoPlayer>().Pause();
Playvideo_button.SetActive(true);
Pausevideo_button.SetActive(false);
Stopvideo_button.SetActive(true);
}
}
}
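Note that playButtonGameObject, stopButtonGameObject and pauseButtonGameObject are not declared anywhere in the script above; they are assumed to be fields referencing the GameObjects that carry the buttons' colliders, for example:
// Hypothetical fields for the snippet above; assign them in the Inspector.
public GameObject playButtonGameObject;
public GameObject pauseButtonGameObject;
public GameObject stopButtonGameObject;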

Unity Vuforia Multiple Targets Video Autoplay Issue

I have an issue here with Unity. I've tried to search for a solution online and tested it, but it doesn't work. Right now I'm developing an AR application with Unity and Vuforia, using multiple image targets with 3D objects and video.
The issue is that the video autoplays before the target has even been scanned. I've unticked 'Play On Awake', but then it only shows me a white quad (I attached the video to the quad object).
I've also tried adding quad.Play() and quad.Stop() to the DefaultTrackableEventHandler script, as some people managed to get it working that way.
Here are the scripts:
/*==============================================================================
Copyright (c) 2017 PTC Inc. All Rights Reserved.
Copyright (c) 2010-2014 Qualcomm Connected Experiences, Inc.
All Rights Reserved.
Confidential and Proprietary - Protected under copyright and other laws.
==============================================================================*/
using UnityEngine;
using Vuforia;
/// <summary>
/// A custom handler that implements the ITrackableEventHandler interface.
///
/// Changes made to this file could be overwritten when upgrading the Vuforia version.
/// When implementing custom event handler behavior, consider inheriting from this class instead.
/// </summary>
public class DefaultTrackableEventHandler : MonoBehaviour, ITrackableEventHandler
{
public UnityEngine.Video.VideoPlayer quad;
#region PROTECTED_MEMBER_VARIABLES
protected TrackableBehaviour mTrackableBehaviour;
protected TrackableBehaviour.Status m_PreviousStatus;
protected TrackableBehaviour.Status m_NewStatus;
#endregion // PROTECTED_MEMBER_VARIABLES
#region UNITY_MONOBEHAVIOUR_METHODS
protected virtual void Start()
{
mTrackableBehaviour = GetComponent<TrackableBehaviour>();
if (mTrackableBehaviour)
mTrackableBehaviour.RegisterTrackableEventHandler(this);
}
protected virtual void OnDestroy()
{
if (mTrackableBehaviour)
mTrackableBehaviour.UnregisterTrackableEventHandler(this);
}
#endregion // UNITY_MONOBEHAVIOUR_METHODS
#region PUBLIC_METHODS
/// <summary>
/// Implementation of the ITrackableEventHandler function called when the
/// tracking state changes.
/// </summary>
public void OnTrackableStateChanged(
TrackableBehaviour.Status previousStatus,
TrackableBehaviour.Status newStatus)
{
m_PreviousStatus = previousStatus;
m_NewStatus = newStatus;
if (newStatus == TrackableBehaviour.Status.DETECTED ||
newStatus == TrackableBehaviour.Status.TRACKED ||
newStatus == TrackableBehaviour.Status.EXTENDED_TRACKED)
{
Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");
OnTrackingFound();
}
else if (previousStatus == TrackableBehaviour.Status.TRACKED &&
newStatus == TrackableBehaviour.Status.NO_POSE)
{
Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");
OnTrackingLost();
}
else
{
// For combo of previousStatus=UNKNOWN + newStatus=UNKNOWN|NOT_FOUND
// Vuforia is starting, but tracking has not been lost or found yet
// Call OnTrackingLost() to hide the augmentations
OnTrackingLost();
}
}
#endregion // PUBLIC_METHODS
#region PROTECTED_METHODS
protected virtual void OnTrackingFound()
{
quad.Play();
var rendererComponents = GetComponentsInChildren<Renderer>(true);
var colliderComponents = GetComponentsInChildren<Collider>(true);
var canvasComponents = GetComponentsInChildren<Canvas>(true);
// Enable rendering:
foreach (var component in rendererComponents)
component.enabled = true;
// Enable colliders:
foreach (var component in colliderComponents)
component.enabled = true;
// Enable canvas':
foreach (var component in canvasComponents)
component.enabled = true;
}
protected virtual void OnTrackingLost()
{
quad.Stop();
var rendererComponents = GetComponentsInChildren<Renderer>(true);
var colliderComponents = GetComponentsInChildren<Collider>(true);
var canvasComponents = GetComponentsInChildren<Canvas>(true);
// Disable rendering:
foreach (var component in rendererComponents)
component.enabled = false;
// Disable colliders:
foreach (var component in colliderComponents)
component.enabled = false;
// Disable canvas':
foreach (var component in canvasComponents)
component.enabled = false;
}
#endregion // PROTECTED_METHODS
}
Hope you guys can help. Thank you :)
You need to add code to play and pause the video player in OnTrackingFound() and OnTrackingLost() respectively.
For OnTrackingFound()
if (mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>() != null)
{
mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>().Play();
}
For OnTrackingLost()
if (mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>() != null)
{
mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>().Pause();
}
Complete script. Replace your script with this code.
using UnityEngine;
using Vuforia;
using UnityEngine.Video; //added
/// <summary>
/// A custom handler that implements the ITrackableEventHandler interface.
/// </summary>
public class DefaultTrackableEventHandler : MonoBehaviour, ITrackableEventHandler
{
#region PRIVATE_MEMBER_VARIABLES
protected TrackableBehaviour mTrackableBehaviour;
#endregion // PRIVATE_MEMBER_VARIABLES
#region UNITY_MONOBEHAVIOUR_METHODS
protected virtual void Start()
{
mTrackableBehaviour = GetComponent<TrackableBehaviour>();
if (mTrackableBehaviour)
mTrackableBehaviour.RegisterTrackableEventHandler(this);
}
#endregion // UNITY_MONOBEHAVIOUR_METHODS
#region PUBLIC_METHODS
/// <summary>
/// Implementation of the ITrackableEventHandler function called when the
/// tracking state changes.
/// </summary>
public void OnTrackableStateChanged(
TrackableBehaviour.Status previousStatus,
TrackableBehaviour.Status newStatus)
{
if (newStatus == TrackableBehaviour.Status.DETECTED ||
newStatus == TrackableBehaviour.Status.TRACKED ||
newStatus == TrackableBehaviour.Status.EXTENDED_TRACKED)
{
Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");
OnTrackingFound();
}
else if (previousStatus == TrackableBehaviour.Status.TRACKED &&
newStatus == TrackableBehaviour.Status.NO_POSE)
{
Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");
OnTrackingLost();
}
else
{
// For combo of previousStatus=UNKNOWN + newStatus=UNKNOWN|NOT_FOUND
// Vuforia is starting, but tracking has not been lost or found yet
// Call OnTrackingLost() to hide the augmentations
OnTrackingLost();
}
}
#endregion // PUBLIC_METHODS
#region PRIVATE_METHODS
protected virtual void OnTrackingFound()
{
var rendererComponents = GetComponentsInChildren<Renderer>(true);
var colliderComponents = GetComponentsInChildren<Collider>(true);
var canvasComponents = GetComponentsInChildren<Canvas>(true);
foreach (var component in rendererComponents)
component.enabled = true;
// Enable colliders:
foreach (var component in colliderComponents)
component.enabled = true;
// Enable canvas':
foreach (var component in canvasComponents)
component.enabled = true;
// added
if (mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>() != null)
{
mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>().Play();
}
}
protected virtual void OnTrackingLost()
{
var rendererComponents = GetComponentsInChildren<Renderer>(true);
var colliderComponents = GetComponentsInChildren<Collider>(true);
var canvasComponents = GetComponentsInChildren<Canvas>(true);
// Disable rendering:
foreach (var component in rendererComponents)
component.enabled = false;
// Disable colliders:
foreach (var component in colliderComponents)
component.enabled = false;
// Disable canvas':
foreach (var component in canvasComponents)
component.enabled = false;
// added
if (mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>() != null)
{
mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>().Pause();
}
}
#endregion // PRIVATE_METHODS
}
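If the white quad before the first frame is still a problem after this change, one common workaround (my assumption, not something covered in the answer above) is to prepare the VideoPlayer up front so the first frame is already decoded by the time tracking is found:
// Sketch: call this from Start() after mTrackableBehaviour has been assigned.
void PrepareVideo()
{
var player = mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>();
if (player == null)
return;
player.playOnAwake = false; // same effect as unticking 'Play On Awake' in the Inspector
player.Prepare(); // decodes the first frame in the background
// player.prepareCompleted fires once it is safe to call Play() without showing a blank frame
}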

How to fix "gravity issue in unity AR project"

Project: Augmented game (vuforia)
I have a plane over which a ball resides.
The plane has rigid body and mesh collider on it.
The ball has a sphere collider and rigid body on it with gravity turned on.
As I play the game, the ball falls through the plane and keeps on falling.
I want the ball to stay on the plane so that I can control it with touch input.
I had this problem and I solved it using the ITrackableEventHandler.
Look at this script:
using System.Collections;
using UnityEngine;
using Vuforia;
public class MarkerController: MonoBehaviour, ITrackableEventHandler
{
private TrackableBehaviour mTrackableBehavior;
// Start is called before the first frame update
void Start()
{
mTrackableBehavior = GetComponent<TrackableBehaviour>();
if (mTrackableBehavior)
{
mTrackableBehavior.RegisterTrackableEventHandler(this);
}
}
public void OnTrackableStateChanged(TrackableBehaviour.Status previousStatus, TrackableBehaviour.Status newStatus)
{
if(newStatus == TrackableBehaviour.Status.DETECTED || newStatus == TrackableBehaviour.Status.TRACKED || newStatus == TrackableBehaviour.Status.EXTENDED_TRACKED)
{
OnTrackingFound();
} else
{
OnTrackingLost();
}
}
private void OnTrackingFound()
{
Renderer[] rendererComponents = GetComponentsInChildren<Renderer>();
Collider[] colliderComponents = GetComponentsInChildren<Collider>();
for (int i = 0; i < this.transform.childCount; i++) // childCount replaces the obsolete GetChildCount()
{
Debug.Log("Activating the children");
this.transform.GetChild(i).gameObject.SetActive(true);
}
foreach(Renderer component in rendererComponents)
{
component.enabled = true;
}
foreach(Collider component in colliderComponents)
{
component.enabled = true;
}
Debug.Log("Tracking of " + mTrackableBehavior.TrackableName + " found");
}
private void OnTrackingLost()
{
Renderer[] rendererComponents = GetComponentsInChildren<Renderer>();
Collider[] colliderComponents = GetComponentsInChildren<Collider>();
for (int i = 0; i < this.transform.childCount; i++) // childCount replaces the obsolete GetChildCount()
{
Debug.Log("Deactivating children");
this.transform.GetChild(i).gameObject.SetActive(false);
}
foreach (Renderer component in rendererComponents)
{
component.enabled = false;
}
foreach (Collider component in colliderComponents)
{
component.enabled = false;
}
Debug.Log("Tracking of " + mTrackableBehavior.TrackableName + " lost");
}
}
In this class, the components are only activated when tracking of the marker is found. Otherwise they stay deactivated, and when tracking is found again they are displayed the way they were before the track was lost.
I've added this script to the ARCamera component. Works fine here!
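One detail the answer does not show: while tracking is lost the plane's collider is disabled, so the ball has nothing to land on. A small sketch of keeping the ball frozen until tracking is found (the ballRigidbody field is my assumption, assigned in the Inspector from the ball object):
public Rigidbody ballRigidbody; // hypothetical reference to the ball's Rigidbody
private void OnTrackingFound()
{
// ...enable renderers and colliders as in the script above...
ballRigidbody.isKinematic = false; // let gravity act only while the plane's collider is enabled
}
private void OnTrackingLost()
{
// ...disable renderers and colliders as in the script above...
ballRigidbody.isKinematic = true; // freeze the ball so it cannot fall while tracking is lost
}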

Unity piano game: trouble with playback of recorded notes

I have a piano app in Unity and I am using an ArrayList to add notes and then play them back. It mostly works, but since the playback method runs whenever playMode is true, it keeps repeating in a loop. I need to know how I can use an if statement to check whether all the recorded notes have been played and, if they have, break the loop. I really need help.
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
public class NotePlay: MonoBehaviour
{
public Button record, play;
public bool recordMode = false;
public bool playMode = false;
Animator anim;
public AudioClip noteA;
public AudioClip noteB;
public AudioClip noteC;
public AudioClip noteD;
public AudioClip noteE;
public AudioClip noteF;
public AudioClip noteG;
public AudioSource audio;
public int count = 0;
// Use this for initialization
public ArrayList notes;
void Start()
{
anim = gameObject.GetComponent<Animator>();
audio = GetComponent<AudioSource>();
notes = new ArrayList();
Button rec = record.GetComponent<Button>();
Button pl = play.GetComponent<Button>();
record.onClick.AddListener(()=>{ recordMode = !recordMode; });
play.onClick.AddListener(() => { playMode = !playMode; recordMode = false; });
}
void Playback()
{
bool flag = true;
for (int i = 0; i < notes.Count && flag; i++)
{
char c = (char)notes[i];
System.Threading.Thread.Sleep(1000);
PlayNote(c);
print(c);
// I want the recorded notes to only play once.
// TODO: what condition goes here to detect that every recorded note has been played, so the loop can stop?
}
}
void PlayNote(char note)
{
if (note == 'a')
{
//anim.SetTrigger("A");
GameObject.Find("Sphere_A").GetComponent<AudioSource>().PlayOneShot(noteA);
GameObject.Find("Sphere_A").GetComponent<Animator>().SetTrigger("A");
print("a");
}
if (note == 'b')
{
//anim.SetTrigger("B");
GameObject.Find("Sphere_B").GetComponent<AudioSource>().PlayOneShot(noteB);
GameObject.Find("Sphere_B").GetComponent<Animator>().SetTrigger("B");
print("b");
}
if (note == 'c')
{
///anim.SetTrigger("C");
GameObject.Find("Sphere_C").GetComponent<AudioSource>().PlayOneShot(noteC);
GameObject.Find("Sphere_C").GetComponent<Animator>().SetTrigger("C");
}
if (note == 'd')
{
//anim.SetTrigger("D");
GameObject.Find("Sphere_D").GetComponent<AudioSource>().PlayOneShot(noteD);
GameObject.Find("Sphere_D").GetComponent<Animator>().SetTrigger("D");
}
if (note == 'e')
{
//anim.SetTrigger("E");
GameObject.Find("Sphere_E").GetComponent<AudioSource>().PlayOneShot(noteE);
GameObject.Find("Sphere_E").GetComponent<Animator>().SetTrigger("E");
}
else if (note == 'f')
{
// anim.SetTrigger("F");
GameObject.Find("Sphere_F").GetComponent<AudioSource>().PlayOneShot(noteF);
GameObject.Find("Sphere_F").GetComponent<Animator>().SetTrigger("F");
}
else if (note == 'g')
{
//anim.SetTrigger("G");
GameObject.Find("Sphere_G").GetComponent<AudioSource>().PlayOneShot(noteG);
GameObject.Find("Sphere_G").GetComponent<Animator>().SetTrigger("G");
}
}
// Update is called once per frame
void Update()
{
if (recordMode == true)
{
if (Input.GetKeyDown(KeyCode.A)) {notes.Add('a'); PlayNote('a'); }
if (Input.GetKeyDown(KeyCode.B)) {notes.Add('b'); PlayNote('b'); }
if (Input.GetKeyDown(KeyCode.C)) {notes.Add('c'); PlayNote('c'); }
if (Input.GetKeyDown(KeyCode.D)) {notes.Add('d'); PlayNote('d'); }
if (Input.GetKeyDown(KeyCode.E)) {notes.Add('e'); PlayNote('e'); }
if (Input.GetKeyDown(KeyCode.F)) {notes.Add('f'); PlayNote('f'); }
if (Input.GetKeyDown(KeyCode.G)) {notes.Add('g'); PlayNote('g'); }
}
else
{
if (Input.GetKeyDown(KeyCode.A)) { PlayNote('a'); }
if (Input.GetKeyDown(KeyCode.B)) { PlayNote('b'); }
if (Input.GetKeyDown(KeyCode.C)) { PlayNote('c'); }
if (Input.GetKeyDown(KeyCode.D)) { PlayNote('d'); }
if (Input.GetKeyDown(KeyCode.E)) { PlayNote('e'); }
if (Input.GetKeyDown(KeyCode.F)) { PlayNote('f'); }
if (Input.GetKeyDown(KeyCode.G)) { PlayNote('g'); }
}
if (playMode == true )
{
Playback();
}
}
}
You may want to specify that the AudioSource doesn't have to loop the sound. Add this line to your Start method:
void Start()
{
anim = gameObject.GetComponent<Animator>();
audio = GetComponent<AudioSource>();
audio.loop = false; // <--------------- HERE
notes = new ArrayList();
Button rec = record.GetComponent<Button>();
Button pl = play.GetComponent<Button>();
record.onClick.AddListener(()=>{ recordMode = !recordMode; });
play.onClick.AddListener(() => { playMode = !playMode; recordMode = false; });
}
You can either do this in code as above or directly in the Inspector of your AudioSource. If the loop property is ticked there, that would explain why it keeps looping when you play a note.
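If the looping actually comes from Update() calling Playback() on every frame while playMode stays true (rather than from the AudioSource), a minimal sketch of playing the recorded sequence exactly once, using a coroutine instead of Thread.Sleep so the game does not freeze (the isPlaying field and PlaybackRoutine name are my additions, not part of the original script):
bool isPlaying; // guards against starting the coroutine again on every frame
IEnumerator PlaybackRoutine()
{
isPlaying = true;
foreach (char c in notes)
{
PlayNote(c);
yield return new WaitForSeconds(1f); // replaces Thread.Sleep(1000) without blocking Update()
}
playMode = false; // every recorded note has now been played once
isPlaying = false;
}
// In Update(), replace "if (playMode == true) { Playback(); }" with:
// if (playMode && !isPlaying) StartCoroutine(PlaybackRoutine());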