I have an issue here with Unity. I've tried searching for a solution online and tested what I found, but it doesn't work. Right now, I'm developing an AR application with Unity and Vuforia. I'm using multiple image targets with 3D objects and video.
The issue is that the video autoplays before the target has even been scanned. I've unticked 'Play On Awake', but then it only shows me a white quad (I attached the video to the quad object).
I've also tried adding quad.Play() and quad.Stop() to the DefaultTrackableEventHandler script, as some people managed to get it to work that way.
Here are the scripts:
/*==============================================================================
Copyright (c) 2017 PTC Inc. All Rights Reserved.
Copyright (c) 2010-2014 Qualcomm Connected Experiences, Inc.
All Rights Reserved.
Confidential and Proprietary - Protected under copyright and other laws.
==============================================================================*/
using UnityEngine;
using Vuforia;
/// <summary>
/// A custom handler that implements the ITrackableEventHandler interface.
///
/// Changes made to this file could be overwritten when upgrading the Vuforia version.
/// When implementing custom event handler behavior, consider inheriting from this class instead.
/// </summary>
public class DefaultTrackableEventHandler : MonoBehaviour, ITrackableEventHandler
{
    /// <summary>
    /// VideoPlayer started/stopped with tracking of this target.
    /// Optional: leave unassigned on targets that have no video.
    /// </summary>
    public UnityEngine.Video.VideoPlayer quad;

    #region PROTECTED_MEMBER_VARIABLES
    protected TrackableBehaviour mTrackableBehaviour;
    protected TrackableBehaviour.Status m_PreviousStatus;
    protected TrackableBehaviour.Status m_NewStatus;
    #endregion // PROTECTED_MEMBER_VARIABLES

    #region UNITY_MONOBEHAVIOUR_METHODS

    // Register this handler with the trackable on the same GameObject.
    protected virtual void Start()
    {
        mTrackableBehaviour = GetComponent<TrackableBehaviour>();
        if (mTrackableBehaviour)
            mTrackableBehaviour.RegisterTrackableEventHandler(this);
    }

    // Unregister so the trackable does not keep a reference to a destroyed handler.
    protected virtual void OnDestroy()
    {
        if (mTrackableBehaviour)
            mTrackableBehaviour.UnregisterTrackableEventHandler(this);
    }

    #endregion // UNITY_MONOBEHAVIOUR_METHODS

    #region PUBLIC_METHODS

    /// <summary>
    /// Implementation of the ITrackableEventHandler function called when the
    /// tracking state changes.
    /// </summary>
    public void OnTrackableStateChanged(
        TrackableBehaviour.Status previousStatus,
        TrackableBehaviour.Status newStatus)
    {
        m_PreviousStatus = previousStatus;
        m_NewStatus = newStatus;

        if (newStatus == TrackableBehaviour.Status.DETECTED ||
            newStatus == TrackableBehaviour.Status.TRACKED ||
            newStatus == TrackableBehaviour.Status.EXTENDED_TRACKED)
        {
            Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");
            OnTrackingFound();
        }
        else if (previousStatus == TrackableBehaviour.Status.TRACKED &&
                 newStatus == TrackableBehaviour.Status.NO_POSE)
        {
            Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");
            OnTrackingLost();
        }
        else
        {
            // For combo of previousStatus=UNKNOWN + newStatus=UNKNOWN|NOT_FOUND
            // Vuforia is starting, but tracking has not been lost or found yet
            // Call OnTrackingLost() to hide the augmentations
            OnTrackingLost();
        }
    }

    #endregion // PUBLIC_METHODS

    #region PROTECTED_METHODS

    // Show all child augmentations and start the (optional) video.
    protected virtual void OnTrackingFound()
    {
        // BUG FIX: null-guard the VideoPlayer. With 'quad' unassigned (e.g. on
        // targets without video), the unconditional quad.Play() threw a
        // NullReferenceException and aborted the method before the renderers
        // below were ever enabled.
        if (quad != null)
            quad.Play();

        var rendererComponents = GetComponentsInChildren<Renderer>(true);
        var colliderComponents = GetComponentsInChildren<Collider>(true);
        var canvasComponents = GetComponentsInChildren<Canvas>(true);

        // Enable rendering:
        foreach (var component in rendererComponents)
            component.enabled = true;

        // Enable colliders:
        foreach (var component in colliderComponents)
            component.enabled = true;

        // Enable canvas':
        foreach (var component in canvasComponents)
            component.enabled = true;
    }

    // Hide all child augmentations and stop the (optional) video.
    protected virtual void OnTrackingLost()
    {
        // BUG FIX: same null guard as in OnTrackingFound().
        if (quad != null)
            quad.Stop();

        var rendererComponents = GetComponentsInChildren<Renderer>(true);
        var colliderComponents = GetComponentsInChildren<Collider>(true);
        var canvasComponents = GetComponentsInChildren<Canvas>(true);

        // Disable rendering:
        foreach (var component in rendererComponents)
            component.enabled = false;

        // Disable colliders:
        foreach (var component in colliderComponents)
            component.enabled = false;

        // Disable canvas':
        foreach (var component in canvasComponents)
            component.enabled = false;
    }

    #endregion // PROTECTED_METHODS
}
Hope you guys can help. Thank you :)
You need to add code to play and pause the video player in OnTrackingFound() and OnTrackingLost(), respectively.
For OnTrackingFound()
if (mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>() != null)
{
mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>().Play();
}
For OnTrackingLost()
if (mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>() != null)
{
mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>().Pause();
}
Complete script. Replace your script with this code.
using UnityEngine;
using Vuforia;
using UnityEngine.Video; //added
/// <summary>
/// A custom handler that implements the ITrackableEventHandler interface.
/// </summary>
public class DefaultTrackableEventHandler : MonoBehaviour, ITrackableEventHandler
{
    #region PRIVATE_MEMBER_VARIABLES
    protected TrackableBehaviour mTrackableBehaviour;
    #endregion // PRIVATE_MEMBER_VARIABLES

    #region UNTIY_MONOBEHAVIOUR_METHODS

    // Register this handler with the trackable on the same GameObject.
    protected virtual void Start()
    {
        mTrackableBehaviour = GetComponent<TrackableBehaviour>();
        if (mTrackableBehaviour)
            mTrackableBehaviour.RegisterTrackableEventHandler(this);
    }

    // BUG FIX: the original never unregistered, leaving the trackable holding
    // a reference to this handler after the component is destroyed (the stock
    // Vuforia handler pairs Register with Unregister in OnDestroy).
    protected virtual void OnDestroy()
    {
        if (mTrackableBehaviour)
            mTrackableBehaviour.UnregisterTrackableEventHandler(this);
    }

    #endregion // UNTIY_MONOBEHAVIOUR_METHODS

    #region PUBLIC_METHODS

    /// <summary>
    /// Implementation of the ITrackableEventHandler function called when the
    /// tracking state changes.
    /// </summary>
    public void OnTrackableStateChanged(
        TrackableBehaviour.Status previousStatus,
        TrackableBehaviour.Status newStatus)
    {
        if (newStatus == TrackableBehaviour.Status.DETECTED ||
            newStatus == TrackableBehaviour.Status.TRACKED ||
            newStatus == TrackableBehaviour.Status.EXTENDED_TRACKED)
        {
            Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");
            OnTrackingFound();
        }
        else if (previousStatus == TrackableBehaviour.Status.TRACKED &&
                 newStatus == TrackableBehaviour.Status.NO_POSE)
        {
            Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");
            OnTrackingLost();
        }
        else
        {
            // For combo of previousStatus=UNKNOWN + newStatus=UNKNOWN|NOT_FOUND
            // Vuforia is starting, but tracking has not been lost or found yet
            // Call OnTrackingLost() to hide the augmentations
            OnTrackingLost();
        }
    }

    #endregion // PUBLIC_METHODS

    #region PRIVATE_METHODS

    // Show all child augmentations, then start the video under this target (if any).
    protected virtual void OnTrackingFound()
    {
        var rendererComponents = GetComponentsInChildren<Renderer>(true);
        var colliderComponents = GetComponentsInChildren<Collider>(true);
        var canvasComponents = GetComponentsInChildren<Canvas>(true);

        // Enable rendering:
        foreach (var component in rendererComponents)
            component.enabled = true;

        // Enable colliders:
        foreach (var component in colliderComponents)
            component.enabled = true;

        // Enable canvas':
        foreach (var component in canvasComponents)
            component.enabled = true;

        // Single component lookup instead of the original's duplicated
        // GetComponentInChildren<VideoPlayer>() call (null-check + use).
        var videoPlayer = mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>();
        if (videoPlayer != null)
        {
            videoPlayer.Play();
        }
    }

    // Hide all child augmentations, then pause the video under this target (if any).
    protected virtual void OnTrackingLost()
    {
        var rendererComponents = GetComponentsInChildren<Renderer>(true);
        var colliderComponents = GetComponentsInChildren<Collider>(true);
        var canvasComponents = GetComponentsInChildren<Canvas>(true);

        // Disable rendering:
        foreach (var component in rendererComponents)
            component.enabled = false;

        // Disable colliders:
        foreach (var component in colliderComponents)
            component.enabled = false;

        // Disable canvas':
        foreach (var component in canvasComponents)
            component.enabled = false;

        // Pause (not Stop) so playback resumes where it left off when the
        // target is found again.
        var videoPlayer = mTrackableBehaviour.gameObject.GetComponentInChildren<VideoPlayer>();
        if (videoPlayer != null)
        {
            videoPlayer.Pause();
        }
    }

    #endregion // PRIVATE_METHODS
}
Related
How can I get a target name from an image target so that I can play a video when a marker is found in an augmented-reality app? Basically, I have this script and I want to modify it so that when a marker is detected it shows a 3D augmented-reality object and, on the side, a video. Here's the script:
/*==============================================================================
Copyright (c) 2021 PTC Inc. All Rights Reserved.
Confidential and Proprietary - Protected under copyright and other laws.
Vuforia is a trademark of PTC Inc., registered in the United States and other
countries.
==============================================================================*/
using System;
using UnityEngine;
using UnityEngine.Events;
using Vuforia;
using UnityEngine.Video;
/// <summary>
/// A custom handler that implements the ITrackableEventHandler interface.
///
/// Changes made to this file could be overwritten when upgrading the Vuforia version.
/// When implementing custom event handler behavior, consider inheriting from this class instead.
/// </summary>
/// </summary>
public class ScriptModificado : MonoBehaviour
{
    public enum TrackingStatusFilter
    {
        Tracked,
        Tracked_ExtendedTracked,
        Tracked_ExtendedTracked_Limited
    }

    /// <summary>
    /// A filter that can be set to either:
    /// - Only consider a target if it's in view (TRACKED)
    /// - Also consider the target if it's outside of the view, but the environment is tracked (EXTENDED_TRACKED)
    /// - Even consider the target if tracking is in LIMITED mode, e.g. the environment is just 3dof tracked.
    /// </summary>
    public TrackingStatusFilter StatusFilter = TrackingStatusFilter.Tracked_ExtendedTracked_Limited;
    public bool UsePoseSmoothing = false;
    public AnimationCurve AnimationCurve = AnimationCurve.Linear(0, 0, LERP_DURATION, 1);
    public UnityEvent OnTargetFound;
    public UnityEvent OnTargetLost;

    /// <summary>
    /// Name of the image target whose tracking controls video playback.
    /// GENERALIZED: this was the hard-coded literal "nocturno"; the default
    /// keeps the original behavior, but the name can now be set per instance
    /// in the Inspector instead of editing the script.
    /// </summary>
    public string VideoTargetName = "nocturno";

    /// <summary>Video played while the target named <see cref="VideoTargetName"/> is tracked. Optional.</summary>
    public VideoPlayer vPlayer;

    /// <summary>Play button hidden when tracking of the video target is lost. Optional.</summary>
    public GameObject BotonReproducir;

    protected ObserverBehaviour mObserverBehaviour;
    protected TargetStatus mPreviousTargetStatus = TargetStatus.NotObserved;
    protected bool mCallbackReceivedOnce;

    const float LERP_DURATION = 0.3f;

    PoseSmoother mPoseSmoother;

    protected virtual void Start()
    {
        mObserverBehaviour = GetComponent<ObserverBehaviour>();
        if (mObserverBehaviour)
        {
            mObserverBehaviour.OnTargetStatusChanged += OnObserverStatusChanged;
            mObserverBehaviour.OnBehaviourDestroyed += OnObserverDestroyed;

            // Apply the current status immediately so the augmentation starts
            // hidden/shown correctly even before the first status-change event.
            OnObserverStatusChanged(mObserverBehaviour, mObserverBehaviour.TargetStatus);
            SetupPoseSmoothing();
        }
    }

    protected virtual void OnDestroy()
    {
        if (VuforiaBehaviour.Instance != null)
            VuforiaBehaviour.Instance.World.OnStateUpdated -= OnStateUpdated;

        if (mObserverBehaviour)
            OnObserverDestroyed(mObserverBehaviour);

        mPoseSmoother?.Dispose();
    }

    // Detach from the observer's events; also used as cleanup from OnDestroy.
    void OnObserverDestroyed(ObserverBehaviour observer)
    {
        mObserverBehaviour.OnTargetStatusChanged -= OnObserverStatusChanged;
        mObserverBehaviour.OnBehaviourDestroyed -= OnObserverDestroyed;
        mObserverBehaviour = null;
    }

    void OnObserverStatusChanged(ObserverBehaviour behaviour, TargetStatus targetStatus)
    {
        var name = mObserverBehaviour.TargetName;
        if (mObserverBehaviour is VuMarkBehaviour vuMarkBehaviour && vuMarkBehaviour.InstanceId != null)
        {
            name += " (" + vuMarkBehaviour.InstanceId + ")";
        }

        Debug.Log($"Target status: {name} {targetStatus.Status} -- {targetStatus.StatusInfo}");

        HandleTargetStatusChanged(mPreviousTargetStatus.Status, targetStatus.Status);
        HandleTargetStatusInfoChanged(targetStatus.StatusInfo);

        mPreviousTargetStatus = targetStatus;
    }

    protected virtual void HandleTargetStatusChanged(Status previousStatus, Status newStatus)
    {
        var shouldBeRendererBefore = ShouldBeRendered(previousStatus);
        var shouldBeRendererNow = ShouldBeRendered(newStatus);

        if (shouldBeRendererBefore != shouldBeRendererNow)
        {
            if (shouldBeRendererNow)
            {
                OnTrackingFound();

                // Start the video only for the designated video target.
                // BUG FIX: vPlayer is null-guarded so instances without a
                // video assigned no longer throw a NullReferenceException.
                if (vPlayer != null && mObserverBehaviour.TargetName == VideoTargetName)
                {
                    vPlayer.Play();
                }
            }
            else
            {
                OnTrackingLost();

                if (mObserverBehaviour.TargetName == VideoTargetName)
                {
                    // BUG FIX: both references are optional, so guard each.
                    if (vPlayer != null)
                        vPlayer.Stop();
                    if (BotonReproducir != null)
                        BotonReproducir.SetActive(false);
                }
            }
        }
        else
        {
            if (!mCallbackReceivedOnce && !shouldBeRendererNow)
            {
                // This is the first time we are receiving this callback, and the target is not visible yet.
                // --> Hide the augmentation.
                OnTrackingLost();
            }
        }

        mCallbackReceivedOnce = true;
    }

    protected virtual void HandleTargetStatusInfoChanged(StatusInfo newStatusInfo)
    {
        if (newStatusInfo == StatusInfo.WRONG_SCALE)
        {
            Debug.LogErrorFormat("The target {0} appears to be scaled incorrectly. " +
                                 "This might result in tracking issues. " +
                                 "Please make sure that the target size corresponds to the size of the " +
                                 "physical object in meters and regenerate the target or set the correct " +
                                 "size in the target's inspector.", mObserverBehaviour.TargetName);
        }
    }

    // Maps a tracking status onto "should the augmentation be visible" per StatusFilter.
    protected bool ShouldBeRendered(Status status)
    {
        if (status == Status.TRACKED)
        {
            // always render the augmentation when status is TRACKED, regardless of filter
            return true;
        }

        if (StatusFilter == TrackingStatusFilter.Tracked_ExtendedTracked && status == Status.EXTENDED_TRACKED)
        {
            // also return true if the target is extended tracked
            return true;
        }

        if (StatusFilter == TrackingStatusFilter.Tracked_ExtendedTracked_Limited &&
            (status == Status.EXTENDED_TRACKED || status == Status.LIMITED))
        {
            // in this mode, render the augmentation even if the target's tracking status is LIMITED.
            // this is mainly recommended for Anchors.
            return true;
        }

        return false;
    }

    protected virtual void OnTrackingFound()
    {
        if (mObserverBehaviour)
        {
            var rendererComponents = mObserverBehaviour.GetComponentsInChildren<Renderer>(true);
            var colliderComponents = mObserverBehaviour.GetComponentsInChildren<Collider>(true);
            var canvasComponents = mObserverBehaviour.GetComponentsInChildren<Canvas>(true);

            // Enable rendering:
            foreach (var component in rendererComponents)
                component.enabled = true;

            // Enable colliders:
            foreach (var component in colliderComponents)
                component.enabled = true;

            // Enable canvas':
            foreach (var component in canvasComponents)
                component.enabled = true;
        }

        OnTargetFound?.Invoke();
    }

    protected virtual void OnTrackingLost()
    {
        if (mObserverBehaviour)
        {
            var rendererComponents = mObserverBehaviour.GetComponentsInChildren<Renderer>(true);
            var colliderComponents = mObserverBehaviour.GetComponentsInChildren<Collider>(true);
            var canvasComponents = mObserverBehaviour.GetComponentsInChildren<Canvas>(true);

            // Disable rendering:
            foreach (var component in rendererComponents)
                component.enabled = false;

            // Disable colliders:
            foreach (var component in colliderComponents)
                component.enabled = false;

            // Disable canvas':
            foreach (var component in canvasComponents)
                component.enabled = false;
        }

        OnTargetLost?.Invoke();
    }

    protected void SetupPoseSmoothing()
    {
        UsePoseSmoothing &= VuforiaBehaviour.Instance.WorldCenterMode == WorldCenterMode.DEVICE; // pose smoothing only works with the DEVICE world center mode
        mPoseSmoother = new PoseSmoother(mObserverBehaviour, AnimationCurve);

        VuforiaBehaviour.Instance.World.OnStateUpdated += OnStateUpdated;
    }

    void OnStateUpdated()
    {
        if (enabled && UsePoseSmoothing)
            mPoseSmoother.Update();
    }

    // Interpolates the target's pose over time to hide tracking jumps when the
    // status transitions between TRACKED and EXTENDED_TRACKED.
    class PoseSmoother
    {
        const float e = 0.001f;
        const float MIN_ANGLE = 2f;

        PoseLerp mActivePoseLerp;
        Pose mPreviousPose;

        readonly ObserverBehaviour mTarget;
        readonly AnimationCurve mAnimationCurve;

        TargetStatus mPreviousStatus;

        public PoseSmoother(ObserverBehaviour target, AnimationCurve animationCurve)
        {
            mTarget = target;
            mAnimationCurve = animationCurve;
        }

        public void Update()
        {
            var currentPose = new Pose(mTarget.transform.position, mTarget.transform.rotation);
            var currentStatus = mTarget.TargetStatus;

            UpdatePoseSmoothing(currentPose, currentStatus);

            mPreviousPose = currentPose;
            mPreviousStatus = currentStatus;
        }

        void UpdatePoseSmoothing(Pose currentPose, TargetStatus currentTargetStatus)
        {
            if (mActivePoseLerp == null && ShouldSmooth(currentPose, currentTargetStatus))
            {
                mActivePoseLerp = new PoseLerp(mPreviousPose, currentPose, mAnimationCurve);
            }

            if (mActivePoseLerp != null)
            {
                var pose = mActivePoseLerp.GetSmoothedPosition(Time.deltaTime);
                mTarget.transform.SetPositionAndRotation(pose.position, pose.rotation);

                if (mActivePoseLerp.Complete)
                {
                    mActivePoseLerp = null;
                }
            }
        }

        /// Smooth pose transition if the pose changed and the target is still being reported as "extended tracked" or it has just returned to
        /// "tracked" from previously being "extended tracked"
        bool ShouldSmooth(Pose currentPose, TargetStatus currentTargetStatus)
        {
            return (currentTargetStatus.Status == Status.EXTENDED_TRACKED || (currentTargetStatus.Status == Status.TRACKED && mPreviousStatus.Status == Status.EXTENDED_TRACKED)) &&
                   (Vector3.SqrMagnitude(currentPose.position - mPreviousPose.position) > e || Quaternion.Angle(currentPose.rotation, mPreviousPose.rotation) > MIN_ANGLE);
        }

        public void Dispose()
        {
            mActivePoseLerp = null;
        }
    }

    // Lerps between two poses along an AnimationCurve; Complete is set once
    // the curve's end time is reached.
    class PoseLerp
    {
        readonly AnimationCurve mCurve;
        readonly Pose mStartPose;
        readonly Pose mEndPose;
        readonly float mEndTime;

        float mElapsedTime;

        public bool Complete { get; private set; }

        public PoseLerp(Pose startPose, Pose endPose, AnimationCurve curve)
        {
            mStartPose = startPose;
            mEndPose = endPose;
            mCurve = curve;
            mEndTime = mCurve.keys[mCurve.length - 1].time;
        }

        public Pose GetSmoothedPosition(float deltaTime)
        {
            mElapsedTime += deltaTime;
            if (mElapsedTime >= mEndTime)
            {
                mElapsedTime = 0;
                Complete = true;
                return mEndPose;
            }

            var ratio = mCurve.Evaluate(mElapsedTime);
            var smoothPosition = Vector3.Lerp(mStartPose.position, mEndPose.position, ratio);
            var smoothRotation = Quaternion.Slerp(mStartPose.rotation, mEndPose.rotation, ratio);

            return new Pose(smoothPosition, smoothRotation);
        }
    }
}
So how do I get the target name from mObserverBehaviour, and change the "nocturno" comparison to the target name of my own image target so that it plays the video? And how can I modify this code so that when the Vuforia AR camera detects the image target, the video is displayed on the side in a 2D sprite object, so that it looks more or less like this video: https://www.youtube.com/watch?v=izJatV5ypvM
attached imagesenter image description here
[1]: https://i.stack.imgur.com/Buleh.png
enter image description here
enter image description here
enter image description here
There is a problem in my project: the video plays automatically. I want to be able to play and pause the video in Unity. I'm using Unity 2019.3.5f1 and Vuforia. Is there any tutorial about how to make AR video playback play and pause?
I tried this code but it doesn't work, and I don't know why.
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using Vuforia;
using UnityEngine.Video;
public class PlayControl : MonoBehaviour, ITrackableEventHandler
{
    public GameObject videoplayer, Playvideo_button, Pausevideo_button, Stopvideo_button;
    protected TrackableBehaviour mTrackableBehaviour;

    // Cached so we do not run GetComponent<VideoPlayer>() on every tracking
    // event and every tap (the original looked it up repeatedly).
    private VideoPlayer mVideoPlayer;

    /// <summary>
    /// Called by Vuforia when the tracking state of this target changes;
    /// starts/stops the video and toggles the augmentations.
    /// </summary>
    public void OnTrackableStateChanged(TrackableBehaviour.Status previousStatus, TrackableBehaviour.Status newStatus)
    {
        if (newStatus == TrackableBehaviour.Status.DETECTED ||
            newStatus == TrackableBehaviour.Status.TRACKED ||
            newStatus == TrackableBehaviour.Status.EXTENDED_TRACKED)
        {
            Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");
            // Auto-play only for the dedicated video marker.
            if (mTrackableBehaviour.TrackableName == "Video_marker" && mVideoPlayer != null)
            {
                mVideoPlayer.Play();
            }
            OnTrackingFound();
        }
        else if (previousStatus == TrackableBehaviour.Status.TRACKED &&
                 newStatus == TrackableBehaviour.Status.NO_POSE)
        {
            Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");
            if (mVideoPlayer != null)
            {
                mVideoPlayer.Stop();
            }
            OnTrackingLost();
        }
        else
        {
            // For combo of previousStatus=UNKNOWN + newStatus=UNKNOWN|NOT_FOUND
            // Vuforia is starting, but tracking has not been lost or found yet
            // Call OnTrackingLost() to hide the augmentations
            OnTrackingLost();
        }
    }

    // Start is called before the first frame update
    void Start()
    {
        mTrackableBehaviour = GetComponent<TrackableBehaviour>();
        if (mTrackableBehaviour)
        {
            mTrackableBehaviour.RegisterTrackableEventHandler(this);
        }

        // BUG FIX: resolve the VideoPlayer once instead of on every event/tap;
        // also guards against an unassigned 'videoplayer' reference, which
        // previously threw a NullReferenceException.
        if (videoplayer != null)
        {
            mVideoPlayer = videoplayer.GetComponent<VideoPlayer>();
        }
    }

    // Unregister so the trackable does not keep a reference to a destroyed
    // handler (mirrors the stock Vuforia handler).
    void OnDestroy()
    {
        if (mTrackableBehaviour)
        {
            mTrackableBehaviour.UnregisterTrackableEventHandler(this);
        }
    }

    protected virtual void OnTrackingFound()
    {
        var rendererComponents = GetComponentsInChildren<Renderer>(true);
        var colliderComponents = GetComponentsInChildren<Collider>(true);
        var canvasComponents = GetComponentsInChildren<Canvas>(true);

        // Enable rendering:
        foreach (var component in rendererComponents)
            component.enabled = true;

        // Enable colliders:
        foreach (var component in colliderComponents)
            component.enabled = true;

        // Enable canvas':
        foreach (var component in canvasComponents)
            component.enabled = true;
    }

    protected virtual void OnTrackingLost()
    {
        var rendererComponents = GetComponentsInChildren<Renderer>(true);
        var colliderComponents = GetComponentsInChildren<Collider>(true);
        var canvasComponents = GetComponentsInChildren<Canvas>(true);

        // Disable rendering:
        foreach (var component in rendererComponents)
            component.enabled = false;

        // Disable colliders:
        foreach (var component in colliderComponents)
            component.enabled = false;

        // Disable canvas':
        foreach (var component in canvasComponents)
            component.enabled = false;
    }

    // Update is called once per frame. Raycasts the tap against the 3D play /
    // pause / stop buttons and switches playback state accordingly.
    void Update()
    {
        if (!Input.GetMouseButtonDown(0) || mVideoPlayer == null)
            return;

        Ray ray = Camera.main.ScreenPointToRay(Input.mousePosition);
        if (!Physics.Raycast(ray, out RaycastHit hit))
            return;

        // CompareTag avoids the per-call string allocation of '.tag' and logs
        // a clear error if a tag name is mistyped/undefined.
        // The three cases are mutually exclusive, hence else-if.
        if (hit.collider.CompareTag("Playvideo"))
        {
            mVideoPlayer.Play();
            Playvideo_button.SetActive(false);
            Pausevideo_button.SetActive(true);
            Stopvideo_button.SetActive(true);
        }
        else if (hit.collider.CompareTag("Stopvideo"))
        {
            mVideoPlayer.Stop();
            Playvideo_button.SetActive(true);
            Pausevideo_button.SetActive(true);
            Stopvideo_button.SetActive(false);
        }
        else if (hit.collider.CompareTag("Pausevideo"))
        {
            mVideoPlayer.Pause();
            Playvideo_button.SetActive(true);
            Pausevideo_button.SetActive(false);
            Stopvideo_button.SetActive(true);
        }
    }
}
It's not really clear where does this script runs in your project. The best way to make sure your hits are in the right place is to add the script to the buttons, which I see that you didn't. Did you make sure your Raycast reaches the right button? you can define a maxDistance to your Physics.Raycast that may be a great help here. Also in your "if" statements, I would check if there is a collision first and then check for the tag, something like this:
if (Physics.Raycast(ray, out hit, 900.0f))
{
if (hit.transform != null)
{
if (GameObject.ReferenceEquals(hit.transform.gameObject, playButtonGameObject))
{
videoplayer.GetComponent<VideoPlayer>().Play();
Playvideo_button.SetActive(false);
Pausevideo_button.SetActive(true);
Stopvideo_button.SetActive(true);
}
else if (GameObject.ReferenceEquals(hit.transform.gameObject, stopButtonGameObject))
{
videoplayer.GetComponent<VideoPlayer>().Stop();
Playvideo_button.SetActive(true);
Pausevideo_button.SetActive(true);
Stopvideo_button.SetActive(false);
}
else if (GameObject.ReferenceEquals(hit.transform.gameObject, pauseButtonGameObject))
{
videoplayer.GetComponent<VideoPlayer>().Pause();
Playvideo_button.SetActive(true);
Pausevideo_button.SetActive(false);
Stopvideo_button.SetActive(true);
}
}
}
Project: Augmented game (vuforia)
I have a plane over which a ball resides.
The plane has rigid body and mesh collider on it.
The ball has a sphere collider and rigid body on it with gravity turned on.
As i play the game,the ball falls through the plane and keeps on falling.
I want the ball to stay on the plane so that i can control it with touch input.
I had this problem and I solved using the I TrackableEventHandler.
Look at this script:
using System.Collections;
using UnityEngine;
using Vuforia;
public class MarkerController : MonoBehaviour, ITrackableEventHandler
{
    private TrackableBehaviour mTrackableBehavior;

    // Start is called before the first frame update
    void Start()
    {
        mTrackableBehavior = GetComponent<TrackableBehaviour>();
        if (mTrackableBehavior)
        {
            mTrackableBehavior.RegisterTrackableEventHandler(this);
        }
    }

    /// <summary>
    /// Shows the augmentation while the target is detected/tracked/extended-tracked,
    /// hides it otherwise.
    /// </summary>
    public void OnTrackableStateChanged(TrackableBehaviour.Status previousStatus, TrackableBehaviour.Status newStatus)
    {
        if (newStatus == TrackableBehaviour.Status.DETECTED || newStatus == TrackableBehaviour.Status.TRACKED || newStatus == TrackableBehaviour.Status.EXTENDED_TRACKED)
        {
            OnTrackingFound();
        }
        else
        {
            OnTrackingLost();
        }
    }

    private void OnTrackingFound()
    {
        // BUG FIX: pass includeInactive=true. OnTrackingLost() deactivates the
        // child GameObjects, and the parameterless GetComponentsInChildren()
        // skips inactive objects — so after the first lost->found cycle these
        // arrays came back empty and the renderers/colliders were never
        // re-enabled.
        Renderer[] rendererComponents = GetComponentsInChildren<Renderer>(true);
        Collider[] colliderComponents = GetComponentsInChildren<Collider>(true);

        // transform.childCount replaces the obsolete GetChildCount().
        for (int i = 0; i < this.transform.childCount; i++)
        {
            Debug.Log("Activating the children");
            this.transform.GetChild(i).gameObject.SetActive(true);
        }

        foreach (Renderer component in rendererComponents)
        {
            component.enabled = true;
        }
        foreach (Collider component in colliderComponents)
        {
            component.enabled = true;
        }

        Debug.Log("Tracking of " + mTrackableBehavior.TrackableName + " found");
    }

    private void OnTrackingLost()
    {
        // Same includeInactive=true fix as OnTrackingFound(), for symmetry.
        Renderer[] rendererComponents = GetComponentsInChildren<Renderer>(true);
        Collider[] colliderComponents = GetComponentsInChildren<Collider>(true);

        for (int i = 0; i < this.transform.childCount; i++)
        {
            Debug.Log("Deactivating children");
            this.transform.GetChild(i).gameObject.SetActive(false);
        }

        foreach (Renderer component in rendererComponents)
        {
            component.enabled = false;
        }
        foreach (Collider component in colliderComponents)
        {
            component.enabled = false;
        }

        Debug.Log("Tracking of " + mTrackableBehavior.TrackableName + " lost");
    }
}
In this class, I've only activated the components when the tracking of the markers is found! Otherwise it will be deactivated, and when the tracking is found again, then it will be displayed as the way it was before losing track.
I've added this script to the ARCamera component. Works fine here!
I'm using the Unity Watson SDK to take the user's speech, parse it to text, and then pass the text to the SDK's 'textToSpeechWidget', http://i.imgur.com/4xBhZYx.png.
I've created a button "button (button)" that is meant to, when pressed, play the associated text input (which is pulling the user's speech). The 'speech to text' function is working — when a user speaks, the 'input text' UI is filled with their words. But the program should then play the associated words when the user presses the button. Instead, nothing happens, and the system doesn't indicate any errors or describe why. Here is the TextToSpeech widget code, as well as the textToSpeech code.
using UnityEngine;
using UnityEngine.UI;
using IBM.Watson.DeveloperCloud.Services.TextToSpeech.v1;
using IBM.Watson.DeveloperCloud.Logging;
using IBM.Watson.DeveloperCloud.DataTypes;
using System.Collections.Generic;
using IBM.Watson.DeveloperCloud.Utilities;
#pragma warning disable 414
namespace IBM.Watson.DeveloperCloud.Widgets
{
/// <summary>
/// TextToSpeech widget class wraps the TextToSpeech service.
/// Queues synthesized speech and plays it through the attached AudioSource.
/// </summary>
[RequireComponent(typeof(AudioSource))]
public class TextToSpeechWidget : Widget
{
#region Inputs
// Widget input: text to synthesize (routed to OnTextInput).
[SerializeField]
private Input m_TextInput = new Input("Text", typeof(TextToSpeechData),
"OnTextInput");
// Widget input: voice selection (routed to OnVoiceSelect).
[SerializeField]
private Input m_VoiceInput = new Input("Voice", typeof(VoiceData),
"OnVoiceSelect");
#endregion
#region Outputs
// Emits true while a clip is playing, false when it ends.
[SerializeField]
private Output m_Speaking = new Output(typeof(SpeakingStateData), true);
// Asks a connected microphone widget to mute itself during playback.
[SerializeField]
private Output m_DisableMic = new Output(typeof(DisableMicData));
// Periodic audio-level samples emitted while playing (see OnLevelOut).
[SerializeField]
private Output m_LevelOut = new Output(typeof(LevelData));
#endregion
#region Private Data
TextToSpeech m_TextToSpeech = new TextToSpeech();
[SerializeField, Tooltip("How often to send level out data in seconds.")]
private float m_LevelOutInterval = 0.05f;
[SerializeField]
private float m_LevelOutputModifier = 1.0f;
// Button that triggers OnTextToSpeech(); NOTE(review): the click event must
// be wired to OnTextToSpeech in the Inspector — assigning the field alone
// does not make the button do anything.
[SerializeField]
private Button m_TextToSpeechButton = null;
// Input field whose current text is synthesized when the button is pressed.
[SerializeField]
private InputField m_Input = null;
[SerializeField]
private Text m_StatusText = null;
[SerializeField]
private VoiceType m_Voice = VoiceType.en_US_Michael;
[SerializeField]
private bool m_UsePost = false;
private AudioSource m_Source = null;
private int m_LastPlayPos = 0;
// One queued synthesis request; Ready flips true once the service returns a clip.
private class Speech
{
~Speech()
{
if (Clip != null)
UnityObjectUtil.DestroyUnityObject(Clip);
}
public bool Ready { get; set; }
public AudioClip Clip { get; set; }
// Kicks off the asynchronous synthesis immediately on construction.
public Speech(TextToSpeech textToSpeech, string text, bool usePost)
{
textToSpeech.ToSpeech(text, OnAudioClip, usePost);
}
private void OnAudioClip(AudioClip clip)
{
Clip = clip;
Ready = true;
}
};
// FIFO of pending requests; Update() plays them in order.
private Queue<Speech> m_SpeechQueue = new Queue<Speech>();
private Speech m_ActiveSpeech = null;
#endregion
#region Public Memebers
/// <summary>
/// Gets or sets the voice. Default voice is English, US - Michael
/// </summary>
/// <value>The voice.</value>
public VoiceType Voice
{
get
{
return m_Voice;
}
set
{
m_Voice = value;
}
}
#endregion
#region Event Handlers
/// <summary>
/// Button event handler. Enqueues the current input-field text for synthesis
/// and disables the button until playback starts.
/// </summary>
public void OnTextToSpeech()
{
if (m_TextToSpeech.Voice != m_Voice)
m_TextToSpeech.Voice = m_Voice;
if (m_Input != null)
m_SpeechQueue.Enqueue(new Speech(m_TextToSpeech, m_Input.text, m_UsePost));
if (m_StatusText != null)
m_StatusText.text = "THINKING";
if (m_TextToSpeechButton != null)
m_TextToSpeechButton.interactable = false;
}
#endregion
#region Private Functions
// Widget-input handler: enqueue text arriving over a widget connection.
private void OnTextInput(Data data)
{
TextToSpeechData text = data as TextToSpeechData;
if (text == null)
throw new WatsonException("Wrong data type received.");
if (!string.IsNullOrEmpty(text.Text))
{
if (m_TextToSpeech.Voice != m_Voice)
m_TextToSpeech.Voice = m_Voice;
m_SpeechQueue.Enqueue(new Speech(m_TextToSpeech, text.Text, m_UsePost));
}
}
// Widget-input handler: switch the synthesis voice.
private void OnVoiceSelect(Data data)
{
VoiceData voice = data as VoiceData;
if (voice == null)
throw new WatsonException("Unexpected data type");
m_Voice = voice.Voice;
}
private void OnEnable()
{
UnityObjectUtil.StartDestroyQueue();
if (m_StatusText != null)
m_StatusText.text = "READY";
}
/// <exclude />
protected override void Start()
{
base.Start();
// AudioSource is guaranteed by [RequireComponent] on the class.
m_Source = GetComponent<AudioSource>();
}
// Dequeues the next ready clip once the AudioSource is idle and plays it,
// notifying the Speaking/DisableMic outputs and scheduling OnEndSpeech.
private void Update()
{
if (m_Source != null && !m_Source.isPlaying
&& m_SpeechQueue.Count > 0
&& m_SpeechQueue.Peek().Ready)
{
CancelInvoke("OnEndSpeech");
m_ActiveSpeech = m_SpeechQueue.Dequeue();
if (m_ActiveSpeech.Clip != null)
{
if (m_Speaking.IsConnected)
m_Speaking.SendData(new SpeakingStateData(true));
if (m_DisableMic.IsConnected)
m_DisableMic.SendData(new DisableMicData(true));
m_Source.spatialBlend = 0.0f; // 2D sound
m_Source.loop = false; // do not loop
m_Source.clip = m_ActiveSpeech.Clip; // clip
m_Source.Play();
// Schedule cleanup slightly after the clip's computed duration.
Invoke("OnEndSpeech", ((float)m_ActiveSpeech.Clip.samples / (float)m_ActiveSpeech.Clip.frequency) + 0.1f);
if (m_LevelOut.IsConnected)
{
m_LastPlayPos = 0;
InvokeRepeating("OnLevelOut", m_LevelOutInterval, m_LevelOutInterval);
}
}
else
{
Log.Warning("TextToSpeechWidget", "Skipping null AudioClip");
}
}
// NOTE(review): the button is re-enabled and the status reset to READY on
// every frame, regardless of queue state — so "THINKING"/disabled lasts at
// most one frame. Presumably this was meant to run only when the queue is
// empty; verify against the SDK's intended behavior.
if (m_TextToSpeechButton != null)
m_TextToSpeechButton.interactable = true;
if (m_StatusText != null)
m_StatusText.text = "READY";
}
// InvokeRepeating callback: sends the peak sample level since the last call.
private void OnLevelOut()
{
if (m_Source != null && m_Source.isPlaying)
{
int currentPos = m_Source.timeSamples;
if (currentPos > m_LastPlayPos)
{
float[] samples = new float[currentPos - m_LastPlayPos];
m_Source.clip.GetData(samples, m_LastPlayPos);
m_LevelOut.SendData(new LevelData(Mathf.Max(samples) * m_LevelOutputModifier, m_LevelOutputModifier));
m_LastPlayPos = currentPos;
}
}
else
CancelInvoke("OnLevelOut");
}
// Scheduled by Update(): clears speaking state and re-enables the microphone.
private void OnEndSpeech()
{
if (m_Speaking.IsConnected)
m_Speaking.SendData(new SpeakingStateData(false));
if (m_DisableMic.IsConnected)
m_DisableMic.SendData(new DisableMicData(false));
if (m_Source.isPlaying)
m_Source.Stop();
m_ActiveSpeech = null;
}
/// <exclude />
protected override string GetName()
{
return "TextToSpeech";
}
#endregion
}
}
The serialized field asks for a button to play the audio, which I've provided. why does it not play the audio? Thanks so much for your help :)
In order to play the text via button (Button m_TextToSpeechButton), the button needs to call 'OnTextToSpeech' on click and connected to Widget. Then, you need to connect your text input field to widget as Input (InputField m_Input).
This should do the trick.
I detect more than one image target in #unity. When I detect a target, I play a sound that pronounces the image target's name. If I detect one target it plays the sound loudly, but when I detect multiple targets it plays one of the sounds loudly and plays the others very quietly, or does not play them at all.
Please suggest a specific way to do this — either a tutorial or an explanation in your answer.
using UnityEngine;
using Vuforia; // FIX: TrackableBehaviour / ITrackableEventHandler live in the Vuforia namespace

/// <summary>
/// Per-image-target handler: shows the target's renderers/colliders and plays
/// an assigned clip when the target is detected, and hides them when tracking
/// is lost.
/// </summary>
public class SoundPlay : MonoBehaviour, ITrackableEventHandler
{
    #region PRIVATE_MEMBER_VARIABLES
    private TrackableBehaviour mTrackableBehaviour;
    // Clip pronounced when this target is found (assigned in the Inspector).
    public AudioClip sound;
    #endregion // PRIVATE_MEMBER_VARIABLES

    #region UNTIY_MONOBEHAVIOUR_METHODS
    void Start()
    {
        mTrackableBehaviour = GetComponent<TrackableBehaviour>();
        if (mTrackableBehaviour)
        {
            mTrackableBehaviour.RegisterTrackableEventHandler(this);
        }
    }
    #endregion // UNTIY_MONOBEHAVIOUR_METHODS

    #region PUBLIC_METHODS
    /// <summary>
    /// Implementation of the ITrackableEventHandler function called when the
    /// tracking state changes.
    /// </summary>
    public void OnTrackableStateChanged(
        TrackableBehaviour.Status previousStatus,
        TrackableBehaviour.Status newStatus)
    {
        if (newStatus == TrackableBehaviour.Status.DETECTED ||
            newStatus == TrackableBehaviour.Status.TRACKED)
        {
            OnTrackingFound();
        }
        else
        {
            OnTrackingLost();
        }
    }

    /// <summary>
    /// Plays the assigned clip at this target's world position.
    /// </summary>
    public void Play()
    {
        // Guard: nothing to play if no clip was assigned in the Inspector.
        if (sound != null)
        {
            AudioSource.PlayClipAtPoint(sound, transform.position);
        }
    }
    #endregion // PUBLIC_METHODS

    #region PRIVATE_METHODS
    private void OnTrackingFound()
    {
        Renderer[] rendererComponents = GetComponentsInChildren<Renderer>(true);
        Collider[] colliderComponents = GetComponentsInChildren<Collider>(true);
        // Enable rendering:
        foreach (Renderer component in rendererComponents)
        {
            component.enabled = true;
        }
        // Enable colliders:
        foreach (Collider component in colliderComponents)
        {
            component.enabled = true;
        }
        Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");
        // FIX: the original called GetComponentInChildren<fiveSound>(), but no
        // 'fiveSound' type is defined anywhere — this class's own Play() is the
        // intended call.
        Play();
    }

    private void OnTrackingLost()
    {
        Renderer[] rendererComponents = GetComponentsInChildren<Renderer>(true);
        Collider[] colliderComponents = GetComponentsInChildren<Collider>(true);
        // Disable rendering:
        foreach (Renderer component in rendererComponents)
        {
            component.enabled = false;
        }
        // Disable colliders:
        foreach (Collider component in colliderComponents)
        {
            component.enabled = false;
        }
        Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");
    }
    #endregion // PRIVATE_METHODS
}
Is the 3D sound option checked on the AudioSource?
If it is, uncheck it. Let me know how you get on!
^._.^
using UnityEngine;
using Vuforia;

/// <summary>
/// Per-image-target handler: shows the target's renderers/colliders and plays
/// a sound when the target is detected, and hides them when tracking is lost.
/// </summary>
public class SoundPlay : MonoBehaviour, ITrackableEventHandler
{
    #region PRIVATE_MEMBER_VARIABLES
    private TrackableBehaviour mTrackableBehaviour;
    // Clip pronounced when this target is found (assigned in the Inspector).
    public AudioClip sound;
    #endregion // PRIVATE_MEMBER_VARIABLES

    #region UNTIY_MONOBEHAVIOUR_METHODS
    void Start()
    {
        mTrackableBehaviour = GetComponent<TrackableBehaviour>();
        if (mTrackableBehaviour)
        {
            mTrackableBehaviour.RegisterTrackableEventHandler(this);
        }
    }
    #endregion // UNTIY_MONOBEHAVIOUR_METHODS

    #region PUBLIC_METHODS
    /// <summary>
    /// Implementation of the ITrackableEventHandler function called when the
    /// tracking state changes.
    /// </summary>
    public void OnTrackableStateChanged(
        TrackableBehaviour.Status previousStatus,
        TrackableBehaviour.Status newStatus)
    {
        if (newStatus == TrackableBehaviour.Status.DETECTED ||
            newStatus == TrackableBehaviour.Status.TRACKED)
        {
            OnTrackingFound();
        }
        else
        {
            OnTrackingLost();
        }
    }

    /// <summary>
    /// Plays the assigned clip at this target's world position at full volume.
    /// NOTE(review): PlayClipAtPoint creates a spatialized (3D) source, so
    /// targets far from the AR camera will sound quiet — presumably the cause
    /// of the "some sounds play very low" symptom; use a child AudioSource with
    /// spatialBlend = 0 for uniform 2D volume. TODO confirm in the scene setup.
    /// </summary>
    public void Play()
    {
        if (sound != null)
        {
            // Explicit volume argument so every target plays at the same level.
            AudioSource.PlayClipAtPoint(sound, transform.position, 1.0f);
        }
    }
    #endregion // PUBLIC_METHODS

    #region PRIVATE_METHODS
    private void OnTrackingFound()
    {
        Renderer[] rendererComponents = GetComponentsInChildren<Renderer>(true);
        Collider[] colliderComponents = GetComponentsInChildren<Collider>(true);
        // Enable rendering:
        foreach (Renderer component in rendererComponents)
        {
            component.enabled = true;
        }
        // Enable colliders:
        foreach (Collider component in colliderComponents)
        {
            component.enabled = true;
        }
        Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " found");
        // FIX: the original dereferenced GetComponentInChildren<AudioSource>()
        // unconditionally, throwing NullReferenceException on any target
        // without a child AudioSource. Prefer the child source when present,
        // otherwise fall back to this component's own Play().
        AudioSource childSource = GetComponentInChildren<AudioSource>();
        if (childSource != null)
        {
            childSource.Play();
        }
        else
        {
            Play();
        }
    }

    private void OnTrackingLost()
    {
        Renderer[] rendererComponents = GetComponentsInChildren<Renderer>(true);
        Collider[] colliderComponents = GetComponentsInChildren<Collider>(true);
        // Disable rendering:
        foreach (Renderer component in rendererComponents)
        {
            component.enabled = false;
        }
        // Disable colliders:
        foreach (Collider component in colliderComponents)
        {
            component.enabled = false;
        }
        Debug.Log("Trackable " + mTrackableBehaviour.TrackableName + " lost");
    }
    #endregion // PRIVATE_METHODS
}