Azure Spatial Anchors / Unity World Anchors lose position - unity3d

I am working on a simple unity app testing azure spatial anchors on the HoloLens. I started from this example (https://github.com/Azure/azure-spatial-anchors-samples) and changed it a little bit to create several anchors.
In some test sessions the anchored objects suddenly lost their position and were moved by about 10 meters or more.
As I understand HoloLens and mixed reality, the camera position is tracked by some kind of visual odometry or SLAM algorithm, so it is normal that the pose of the device drifts over time and the anchors drift with it. But I did not expect such a huge shift.
Furthermore, I expected the anchors to snap back into place the moment the features in the neighborhood of the anchors become visible to the device's camera again. But this is not always the case. Sometimes the anchors return to their original position when the features are visible again, but sometimes this does not correct the wrong positions at all.
This is the code:
using Microsoft.Azure.SpatialAnchors;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using UnityEngine;
using UnityEngine.XR.WSA;
using UnityEngine.XR.WSA.Input;
using System.Linq;
using System.IO;
using UnityEditor;
public class AzureSpatialAnchorsScript : MonoBehaviour
{
/// <summary>
/// The sphere prefab.
/// </summary>
public GameObject spherePrefab;
/// <summary>
/// Set this string to the Spatial Anchors account id provided in the Spatial Anchors resource.
/// </summary>
protected string SpatialAnchorsAccountId = "xxxxxxxxxxxxxxxx";
/// <summary>
/// Set this string to the Spatial Anchors account key provided in the Spatial Anchors resource.
/// </summary>
protected string SpatialAnchorsAccountKey = "yyyyyyyyyyyyyyyyyyyyyyy";
/// <summary>
/// Our queue of actions that will be executed on the main thread.
/// </summary>
private readonly Queue<Action> dispatchQueue = new Queue<Action>();
/// <summary>
/// Use the recognizer to detect air taps.
/// </summary>
private GestureRecognizer recognizer;
protected CloudSpatialAnchorSession cloudSpatialAnchorSession;
/// <summary>
/// The CloudSpatialAnchor that we either 1) placed and are saving or 2) just located.
/// </summary>
protected CloudSpatialAnchor currentCloudAnchor;
/// <summary>
/// True if we are creating + saving an anchor
/// </summary>
protected bool tapExecuted = false;
/// <summary>
/// The IDs of the CloudSpatialAnchor that were saved. Use it to find the CloudSpatialAnchors
/// </summary>
protected Dictionary<string, GameObject> cloudSpatialAnchorIdsObjects = new Dictionary<string, GameObject> { };
protected IList<string> anchorIds = new List<string>();
/// <summary>
/// The sphere rendered to show the position of the CloudSpatialAnchor.
/// </summary>
protected Material sphereMaterial;
/// <summary>
/// Indicate if we are ready to save an anchor. We can save an anchor when value is greater than 1.
/// </summary>
protected float recommendedForCreate = 0;
private string pathName;
// Start is called before the first frame update
void Start()
{
Application.SetStackTraceLogType(LogType.Log, StackTraceLogType.None);
recognizer = new GestureRecognizer();
recognizer.StartCapturingGestures();
recognizer.SetRecognizableGestures(GestureSettings.Tap);
recognizer.Tapped += HandleTap;
InitializeSession();
string FileName = "ids.txt";
pathName = Path.Combine(Application.persistentDataPath, FileName);
getIds();
if (anchorIds.Count > 0)
{
CreateWatcher(anchorIds.ToArray());
}
}
// Update is called once per frame
void Update()
{
lock (dispatchQueue)
{
if (dispatchQueue.Count > 0)
{
dispatchQueue.Dequeue()();
}
}
}
/// <summary>
/// Queues the specified <see cref="Action"/> on update.
/// </summary>
/// <param name="updateAction">The update action.</param>
protected void QueueOnUpdate(Action updateAction)
{
lock (dispatchQueue)
{
dispatchQueue.Enqueue(updateAction);
}
}
/// <summary>
/// Cleans up objects.
/// </summary>
public void CleanupObjects()
{
if (cloudSpatialAnchorIdsObjects != null)
{
cloudSpatialAnchorIdsObjects = new Dictionary<string, GameObject>();
}
if (sphereMaterial != null)
{
Destroy(sphereMaterial);
sphereMaterial = null;
}
//currentCloudAnchor = null;
}
/// <summary>
/// Initializes a new CloudSpatialAnchorSession.
/// </summary>
void InitializeSession()
{
Debug.Log("ASA Info: Initializing a CloudSpatialAnchorSession.");
if (string.IsNullOrEmpty(SpatialAnchorsAccountId))
{
Debug.LogError("No account id set.");
return;
}
if (string.IsNullOrEmpty(SpatialAnchorsAccountKey))
{
Debug.LogError("No account key set.");
return;
}
cloudSpatialAnchorSession = new CloudSpatialAnchorSession();
cloudSpatialAnchorSession.Configuration.AccountId = SpatialAnchorsAccountId.Trim();
cloudSpatialAnchorSession.Configuration.AccountKey = SpatialAnchorsAccountKey.Trim();
cloudSpatialAnchorSession.LogLevel = SessionLogLevel.All;
cloudSpatialAnchorSession.Error += CloudSpatialAnchorSession_Error;
cloudSpatialAnchorSession.OnLogDebug += CloudSpatialAnchorSession_OnLogDebug;
cloudSpatialAnchorSession.SessionUpdated += CloudSpatialAnchorSession_SessionUpdated;
cloudSpatialAnchorSession.AnchorLocated += CloudSpatialAnchorSession_AnchorLocated;
cloudSpatialAnchorSession.LocateAnchorsCompleted += CloudSpatialAnchorSession_LocateAnchorsCompleted;
cloudSpatialAnchorSession.Start();
Debug.Log("ASA Info: Session was initialized.");
}
void CreateWatcher(string[] cloudSpatialAnchorIds)
{
Debug.Log("ASA Info: We will look for placeded anchors.");
// Create a Watcher to look for the anchor we created.
AnchorLocateCriteria criteria = new AnchorLocateCriteria();
criteria.Identifiers = cloudSpatialAnchorIds;
cloudSpatialAnchorSession.CreateWatcher(criteria);
Debug.Log("ASA Info: Watcher created. Number of active watchers: " + cloudSpatialAnchorSession.GetActiveWatchers().Count);
}
private void CloudSpatialAnchorSession_Error(object sender, SessionErrorEventArgs args)
{
Debug.LogError("ASA Error: " + args.ErrorMessage);
}
private void CloudSpatialAnchorSession_OnLogDebug(object sender, OnLogDebugEventArgs args)
{
Debug.Log("ASA Log: " + args.Message);
System.Diagnostics.Debug.WriteLine("ASA Log: " + args.Message);
}
private void CloudSpatialAnchorSession_SessionUpdated(object sender, SessionUpdatedEventArgs args)
{
Debug.Log("ASA Log: recommendedForCreate: " + args.Status.RecommendedForCreateProgress);
recommendedForCreate = args.Status.RecommendedForCreateProgress;
}
private void CloudSpatialAnchorSession_AnchorLocated(object sender, AnchorLocatedEventArgs args)
{
switch (args.Status)
{
case LocateAnchorStatus.Located:
Debug.Log("ASA Info: Anchor located! Identifier: " + args.Identifier);
QueueOnUpdate(() =>
{
// Create a green sphere.
GameObject spatialAnchorObj = GameObject.Instantiate(spherePrefab, Vector3.zero, Quaternion.identity) as GameObject;
spatialAnchorObj.AddComponent<WorldAnchor>();
sphereMaterial = spatialAnchorObj.GetComponent<MeshRenderer>().material;
sphereMaterial.color = Color.green;
// Get the WorldAnchor from the CloudSpatialAnchor and use it to position the sphere.
spatialAnchorObj.GetComponent<UnityEngine.XR.WSA.WorldAnchor>().SetNativeSpatialAnchorPtr(args.Anchor.LocalAnchor);
cloudSpatialAnchorIdsObjects.Add(args.Anchor.Identifier, spatialAnchorObj);
Debug.Log("Detected Pos: " + spatialAnchorObj.GetComponent<UnityEngine.XR.WSA.WorldAnchor>().transform.position.ToString("F4"));
Debug.Log("Detected Rot: " + spatialAnchorObj.GetComponent<UnityEngine.XR.WSA.WorldAnchor>().transform.rotation.ToString("F4"));
tapExecuted = false;
});
break;
case LocateAnchorStatus.AlreadyTracked:
Debug.Log("ASA Info: Anchor already tracked. Identifier: " + args.Identifier);
break;
case LocateAnchorStatus.NotLocated:
Debug.Log("ASA Info: Anchor not located. Identifier: " + args.Identifier);
break;
case LocateAnchorStatus.NotLocatedAnchorDoesNotExist:
Debug.LogError("ASA Error: Anchor not located does not exist. Identifier: " + args.Identifier);
break;
}
}
private void CloudSpatialAnchorSession_LocateAnchorsCompleted(object sender, LocateAnchorsCompletedEventArgs args)
{
Debug.Log("ASA Info: Locate anchors completed. Watcher identifier: " + args.Watcher.Identifier);
}
/// <summary>
/// Called by GestureRecognizer when a tap is detected.
/// </summary>
/// <param name="eventArgs">The tap.</param>
public void HandleTap(TappedEventArgs eventArgs)
{
if (tapExecuted)
{
return;
}
tapExecuted = true;
Debug.Log("ASA Info: We will create a new anchor.");
//// Clean up any anchors that have been placed.
//CleanupObjects();
// Construct a Ray using forward direction of the HoloLens.
Ray GazeRay = new Ray(eventArgs.headPose.position, eventArgs.headPose.forward);
// Raycast to get the hit point in the real world.
RaycastHit hitInfo;
Physics.Raycast(GazeRay, out hitInfo, float.MaxValue);
this.CreateAndSaveSphere(hitInfo.point);
}
/// <summary>
/// Creates a sphere at the hit point, and then saves a CloudSpatialAnchor there.
/// </summary>
/// <param name="hitPoint">The hit point.</param>
protected virtual void CreateAndSaveSphere(Vector3 hitPoint)
{
// Create a white sphere.
GameObject spatialAnchorObj = GameObject.Instantiate(spherePrefab, hitPoint, Quaternion.identity) as GameObject;
spatialAnchorObj.AddComponent<WorldAnchor>();
sphereMaterial = spatialAnchorObj.GetComponent<MeshRenderer>().material;
sphereMaterial.color = Color.white;
Debug.Log("ASA Info: Created a local anchor.");
// Create the CloudSpatialAnchor.
currentCloudAnchor = new CloudSpatialAnchor();
// Set the LocalAnchor property of the CloudSpatialAnchor to the WorldAnchor component of our white sphere.
WorldAnchor worldAnchor = spatialAnchorObj.GetComponent<WorldAnchor>();
if (worldAnchor == null)
{
throw new Exception("ASA Error: Couldn't get the local anchor pointer.");
}
// Save the CloudSpatialAnchor to the cloud.
currentCloudAnchor.LocalAnchor = worldAnchor.GetNativeSpatialAnchorPtr();
//cloudAnchor.AppProperties[@"x"] = @"frame";
//cloudAnchor.AppProperties[@"label"] = @"my latest picture";
Task.Run(async () =>
{
// Wait for enough data about the environment.
while (recommendedForCreate < 1.0F)
{
await Task.Delay(330);
}
bool success = false;
try
{
QueueOnUpdate(() =>
{
// We are about to save the CloudSpatialAnchor to the Azure Spatial Anchors, turn it yellow.
sphereMaterial.color = Color.yellow;
});
await cloudSpatialAnchorSession.CreateAnchorAsync(currentCloudAnchor);
success = currentCloudAnchor != null;
if (success)
{
// Record the identifier to locate.
string cloudAnchorId = currentCloudAnchor.Identifier;
QueueOnUpdate(() =>
{
// Turn the sphere blue.
sphereMaterial.color = Color.blue;
});
Debug.Log("ASA Info: Saved anchor to Azure Spatial Anchors! Identifier: " + cloudAnchorId);
//Debug.Log("Created " + cloudAnchorId + " at pos: " + worldAnchor.transform.position);
//Debug.Log("Created " + cloudAnchorId + "at rot: " + worldAnchor.transform.rotation);
anchorIds.Add(cloudAnchorId);
cloudSpatialAnchorIdsObjects.Add(cloudAnchorId, spatialAnchorObj);
WriteIds();
}
else
{
sphereMaterial.color = Color.red;
Debug.LogError("ASA Error: Failed to save, but no exception was thrown.");
}
}
catch (Exception ex)
{
QueueOnUpdate(() =>
{
sphereMaterial.color = Color.red;
});
Debug.LogError("ASA Error: " + ex.Message);
}
// Allow the user to tap again to clear state and look for the anchor.
tapExecuted = false;
});
}
void WriteIds()
{
try
{
string fileContent = ""
//= ReadString();
;
foreach (string id in anchorIds)
{
fileContent += id + Environment.NewLine;
}
using (StreamWriter writer = new StreamWriter(new FileStream(pathName, FileMode.OpenOrCreate, FileAccess.Write)))
{
writer.Write(fileContent);
}
}
catch (Exception e)
{
Debug.LogError(e);
}
}
void getIds()
{
try
{
StreamReader reader = new StreamReader(pathName);
string line;
while ((line = reader.ReadLine()) != null)
{
anchorIds.Add(line);
}
reader.Close();
}
catch (FileNotFoundException e)
{
Debug.LogWarning("No AnchorId file found");
}
}
}
Is something wrong with the way the anchors are created or is this normal behavior?

It is not surprising behavior that the anchors sometimes lose their position, and it can happen that only some of the anchors are relocalized after tracking recovers, but not all of them. It could be helpful to add a script to the object you create for each anchor that shows the tracking state of the anchor attached to it. Here is a sample:
using System;
using UnityEngine;
using UnityEngine.XR.WSA;
public class ShowTrackingState : MonoBehaviour
{
WorldAnchor worldAnchor = null;
Material renderMaterial = null;
bool isTracking = false;
void Start()
{
renderMaterial = gameObject.GetComponent<Renderer>().material;
}
void OnDestroy()
{
if (renderMaterial != null)
{
Destroy(renderMaterial);
}
}
void Update()
{
if (worldAnchor == null)
{
worldAnchor = gameObject.GetComponent<WorldAnchor>();
}
if (worldAnchor == null)
{
isTracking = false;
}
else
{
isTracking = worldAnchor.isLocated;
}
// Green while the anchor is located, red while it is not.
renderMaterial.color = isTracking ? Color.green : Color.red;
}
}
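For example (a minimal sketch based on the AnchorLocated handler already in your question), you would attach it right where the sphere for a located anchor is instantiated:
// Inside CloudSpatialAnchorSession_AnchorLocated, after creating the sphere:
GameObject spatialAnchorObj = GameObject.Instantiate(spherePrefab, Vector3.zero, Quaternion.identity);
spatialAnchorObj.AddComponent<WorldAnchor>();
spatialAnchorObj.AddComponent<ShowTrackingState>(); // colors the sphere based on WorldAnchor.isLocated every frame
spatialAnchorObj.GetComponent<WorldAnchor>().SetNativeSpatialAnchorPtr(args.Anchor.LocalAnchor);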
Also, a gentle reminder: it is a common mistake to forget to hide one's account id and key when posting questions on forums. Leaving them in is not secure, so you might want to remove that part from the code snippet in your question :-)
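If you want to keep them out of the source entirely, one option is to read them from a small file at startup instead of hard-coding them; a minimal sketch, assuming a JSON file you copy to the device yourself (the file name and field names below are made up, not part of the ASA SDK):
[Serializable]
class AsaCredentials
{
    public string accountId;  // assumed JSON field
    public string accountKey; // assumed JSON field
}
// e.g. at the top of Start(), before InitializeSession():
string credPath = Path.Combine(Application.persistentDataPath, "asa_credentials.json"); // assumed location
if (File.Exists(credPath))
{
    AsaCredentials creds = JsonUtility.FromJson<AsaCredentials>(File.ReadAllText(credPath));
    SpatialAnchorsAccountId = creds.accountId;
    SpatialAnchorsAccountKey = creds.accountKey;
}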

Related

How can I get a Target name from an image target to play a video player when finding a marker in an augmented reality app in Unity and vuforia

How can I get a target name from an image target to play a video when finding a marker in an augmented reality app? Basically, I have this script and I want to modify it so that when a marker is detected it shows a 3D augmented reality object and, on the side, a video. Here's the script:
/*==============================================================================
Copyright (c) 2021 PTC Inc. All Rights Reserved.
Confidential and Proprietary - Protected under copyright and other laws.
Vuforia is a trademark of PTC Inc., registered in the United States and other
countries.
==============================================================================*/
using System;
using UnityEngine;
using UnityEngine.Events;
using Vuforia;
using UnityEngine.Video;
/// <summary>
/// A custom handler that implements the ITrackableEventHandler interface.
///
/// Changes made to this file could be overwritten when upgrading the Vuforia version.
/// When implementing custom event handler behavior, consider inheriting from this class instead.
/// </summary>
public class ScriptModificado : MonoBehaviour
{
public enum TrackingStatusFilter
{
Tracked,
Tracked_ExtendedTracked,
Tracked_ExtendedTracked_Limited
}
/// <summary>
/// A filter that can be set to either:
/// - Only consider a target if it's in view (TRACKED)
/// - Also consider the target if it's outside of the view, but the environment is tracked (EXTENDED_TRACKED)
/// - Even consider the target if tracking is in LIMITED mode, e.g. the environment is just 3dof tracked.
/// </summary>
public TrackingStatusFilter StatusFilter = TrackingStatusFilter.Tracked_ExtendedTracked_Limited;
public bool UsePoseSmoothing = false;
public AnimationCurve AnimationCurve = AnimationCurve.Linear(0, 0, LERP_DURATION, 1);
public UnityEvent OnTargetFound;
public UnityEvent OnTargetLost;
protected ObserverBehaviour mObserverBehaviour;
protected TargetStatus mPreviousTargetStatus = TargetStatus.NotObserved;
protected bool mCallbackReceivedOnce;
const float LERP_DURATION = 0.3f;
PoseSmoother mPoseSmoother;
// public AudioSource aSource;
// public AudioClip aClip;
public VideoPlayer vPlayer;
public GameObject BotonReproducir;
protected virtual void Start()
{
mObserverBehaviour = GetComponent<ObserverBehaviour>();
if (mObserverBehaviour)
{
mObserverBehaviour.OnTargetStatusChanged += OnObserverStatusChanged;
mObserverBehaviour.OnBehaviourDestroyed += OnObserverDestroyed;
OnObserverStatusChanged(mObserverBehaviour, mObserverBehaviour.TargetStatus);
SetupPoseSmoothing();
}
}
protected virtual void OnDestroy()
{
if (VuforiaBehaviour.Instance != null)
VuforiaBehaviour.Instance.World.OnStateUpdated -= OnStateUpdated;
if (mObserverBehaviour)
OnObserverDestroyed(mObserverBehaviour);
mPoseSmoother?.Dispose();
}
void OnObserverDestroyed(ObserverBehaviour observer)
{
mObserverBehaviour.OnTargetStatusChanged -= OnObserverStatusChanged;
mObserverBehaviour.OnBehaviourDestroyed -= OnObserverDestroyed;
mObserverBehaviour = null;
}
void OnObserverStatusChanged(ObserverBehaviour behaviour, TargetStatus targetStatus)
{
var name = mObserverBehaviour.TargetName;
if (mObserverBehaviour is VuMarkBehaviour vuMarkBehaviour && vuMarkBehaviour.InstanceId != null)
{
name += " (" + vuMarkBehaviour.InstanceId + ")";
}
Debug.Log($"Target status: {name} {targetStatus.Status} -- {targetStatus.StatusInfo}");
HandleTargetStatusChanged(mPreviousTargetStatus.Status, targetStatus.Status);
HandleTargetStatusInfoChanged(targetStatus.StatusInfo);
mPreviousTargetStatus = targetStatus;
}
protected virtual void HandleTargetStatusChanged(Status previousStatus, Status newStatus)
{
var shouldBeRendererBefore = ShouldBeRendered(previousStatus);
var shouldBeRendererNow = ShouldBeRendered(newStatus);
if (shouldBeRendererBefore != shouldBeRendererNow)
{
if (shouldBeRendererNow)
{
OnTrackingFound();
/*if (mObserverBehaviour.TargetName == "nocturno")
{
// aSource.PlayOneShot(aClip);
} */
if (mObserverBehaviour.TargetName == "nocturno")
{
vPlayer.Play();
}
}
else
{
OnTrackingLost();
if (mObserverBehaviour.TargetName == "nocturno")
{
// aSource.Stop();
vPlayer.Stop();
BotonReproducir.SetActive(false);
}
}
}
else
{
if (!mCallbackReceivedOnce && !shouldBeRendererNow)
{
// This is the first time we are receiving this callback, and the target is not visible yet.
// --> Hide the augmentation.
OnTrackingLost();
}
}
mCallbackReceivedOnce = true;
}
protected virtual void HandleTargetStatusInfoChanged(StatusInfo newStatusInfo)
{
if (newStatusInfo == StatusInfo.WRONG_SCALE)
{
Debug.LogErrorFormat("The target {0} appears to be scaled incorrectly. " +
"This might result in tracking issues. " +
"Please make sure that the target size corresponds to the size of the " +
"physical object in meters and regenerate the target or set the correct " +
"size in the target's inspector.", mObserverBehaviour.TargetName);
}
}
protected bool ShouldBeRendered(Status status)
{
if (status == Status.TRACKED)
{
// always render the augmentation when status is TRACKED, regardless of filter
return true;
}
if (StatusFilter == TrackingStatusFilter.Tracked_ExtendedTracked && status == Status.EXTENDED_TRACKED)
{
// also return true if the target is extended tracked
return true;
}
if (StatusFilter == TrackingStatusFilter.Tracked_ExtendedTracked_Limited &&
(status == Status.EXTENDED_TRACKED || status == Status.LIMITED))
{
// in this mode, render the augmentation even if the target's tracking status is LIMITED.
// this is mainly recommended for Anchors.
return true;
}
return false;
}
protected virtual void OnTrackingFound()
{
if (mObserverBehaviour)
{
var rendererComponents = mObserverBehaviour.GetComponentsInChildren<Renderer>(true);
var colliderComponents = mObserverBehaviour.GetComponentsInChildren<Collider>(true);
var canvasComponents = mObserverBehaviour.GetComponentsInChildren<Canvas>(true);
// Enable rendering:
foreach (var component in rendererComponents)
component.enabled = true;
// Enable colliders:
foreach (var component in colliderComponents)
component.enabled = true;
// Enable canvas':
foreach (var component in canvasComponents)
component.enabled = true;
}
OnTargetFound?.Invoke();
}
protected virtual void OnTrackingLost()
{
if (mObserverBehaviour)
{
var rendererComponents = mObserverBehaviour.GetComponentsInChildren<Renderer>(true);
var colliderComponents = mObserverBehaviour.GetComponentsInChildren<Collider>(true);
var canvasComponents = mObserverBehaviour.GetComponentsInChildren<Canvas>(true);
// Disable rendering:
foreach (var component in rendererComponents)
component.enabled = false;
// Disable colliders:
foreach (var component in colliderComponents)
component.enabled = false;
// Disable canvas':
foreach (var component in canvasComponents)
component.enabled = false;
}
OnTargetLost?.Invoke();
}
protected void SetupPoseSmoothing()
{
UsePoseSmoothing &= VuforiaBehaviour.Instance.WorldCenterMode == WorldCenterMode.DEVICE; // pose smoothing only works with the DEVICE world center mode
mPoseSmoother = new PoseSmoother(mObserverBehaviour, AnimationCurve);
VuforiaBehaviour.Instance.World.OnStateUpdated += OnStateUpdated;
}
void OnStateUpdated()
{
if (enabled && UsePoseSmoothing)
mPoseSmoother.Update();
}
class PoseSmoother
{
const float e = 0.001f;
const float MIN_ANGLE = 2f;
PoseLerp mActivePoseLerp;
Pose mPreviousPose;
readonly ObserverBehaviour mTarget;
readonly AnimationCurve mAnimationCurve;
TargetStatus mPreviousStatus;
public PoseSmoother(ObserverBehaviour target, AnimationCurve animationCurve)
{
mTarget = target;
mAnimationCurve = animationCurve;
}
public void Update()
{
var currentPose = new Pose(mTarget.transform.position, mTarget.transform.rotation);
var currentStatus = mTarget.TargetStatus;
UpdatePoseSmoothing(currentPose, currentStatus);
mPreviousPose = currentPose;
mPreviousStatus = currentStatus;
}
void UpdatePoseSmoothing(Pose currentPose, TargetStatus currentTargetStatus)
{
if (mActivePoseLerp == null && ShouldSmooth(currentPose, currentTargetStatus))
{
mActivePoseLerp = new PoseLerp(mPreviousPose, currentPose, mAnimationCurve);
}
if (mActivePoseLerp != null)
{
var pose = mActivePoseLerp.GetSmoothedPosition(Time.deltaTime);
mTarget.transform.SetPositionAndRotation(pose.position, pose.rotation);
if (mActivePoseLerp.Complete)
{
mActivePoseLerp = null;
}
}
}
/// Smooth pose transition if the pose changed and the target is still being reported as "extended tracked" or it has just returned to
/// "tracked" from previously being "extended tracked"
bool ShouldSmooth(Pose currentPose, TargetStatus currentTargetStatus)
{
return (currentTargetStatus.Status == Status.EXTENDED_TRACKED || (currentTargetStatus.Status == Status.TRACKED && mPreviousStatus.Status == Status.EXTENDED_TRACKED)) &&
(Vector3.SqrMagnitude(currentPose.position - mPreviousPose.position) > e || Quaternion.Angle(currentPose.rotation, mPreviousPose.rotation) > MIN_ANGLE);
}
public void Dispose()
{
mActivePoseLerp = null;
}
}
class PoseLerp
{
readonly AnimationCurve mCurve;
readonly Pose mStartPose;
readonly Pose mEndPose;
readonly float mEndTime;
float mElapsedTime;
public bool Complete { get; private set; }
public PoseLerp(Pose startPose, Pose endPose, AnimationCurve curve)
{
mStartPose = startPose;
mEndPose = endPose;
mCurve = curve;
mEndTime = mCurve.keys[mCurve.length - 1].time;
}
public Pose GetSmoothedPosition(float deltaTime)
{
mElapsedTime += deltaTime;
if (mElapsedTime >= mEndTime)
{
mElapsedTime = 0;
Complete = true;
return mEndPose;
}
var ratio = mCurve.Evaluate(mElapsedTime);
var smoothPosition = Vector3.Lerp(mStartPose.position, mEndPose.position, ratio);
var smoothRotation = Quaternion.Slerp(mStartPose.rotation, mEndPose.rotation, ratio);
return new Pose(smoothPosition, smoothRotation);
}
}
}
So how do I get the target name from mObserverBehaviour and change the "nocturno" check so it uses the target name of my own image target that should play the video? And how can I modify this code so that, when the Vuforia AR camera detects the image target, the video is displayed on the side in a 2D sprite object, so that it looks more or less like this video: https://www.youtube.com/watch?v=izJatV5ypvM
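Not a full answer, but a sketch of the direction: the name check already happens in HandleTargetStatusChanged, so you would replace the "nocturno" literal with the name of your own image target exactly as it appears in the Vuforia target database (that is what mObserverBehaviour.TargetName returns), and toggle a side panel that your VideoPlayer renders to. The SideVideoPanel field below is an assumption, e.g. a quad or RawImage showing the VideoPlayer's RenderTexture:
// Hypothetical additions to ScriptModificado:
public GameObject SideVideoPanel;            // quad / RawImage that displays the VideoPlayer output
public string VideoTargetName = "nocturno";  // replace with your own image target's name
// In HandleTargetStatusChanged, when the target becomes visible:
if (mObserverBehaviour.TargetName == VideoTargetName)
{
    SideVideoPanel.SetActive(true);
    vPlayer.Play();
}
// ...and when it is lost:
if (mObserverBehaviour.TargetName == VideoTargetName)
{
    vPlayer.Stop();
    SideVideoPanel.SetActive(false);
}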
Attached screenshot: https://i.stack.imgur.com/Buleh.png

Find nearest waypoint to target in unity 3d

I'm developing a game like Hitman Go, and I need to find which waypoints are closest to my target (a specific waypoint) when the enemy has been alerted by a rock, a sound, etc.
I set some points for the enemy, and the enemy patrols between the waypoints (8 -> 6 -> 1 -> 2 -> 3 -> 4 -> 5) and then reverses his path.
So when I throw a rock at waypoint number 18, I need to move the enemy to that waypoint, but by the shortest route. Imagine the enemy can be at any of the patrol waypoints (8, 6, 1, 2, 3, 4, 5) when he gets alerted.
Note 1: The distance between any two connected waypoints is the same.
Note 2: My game is in 3D, not 2D.
I use this code to move my enemy step by step (turn-based).
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class WaypointController : MonoBehaviour
{
public List<Transform> waypoints = new List<Transform>();
private Transform targetWaypoint;
private int targetWaypointIndex = 0;
private float minDistance = 0.1f;
private int lastWaypointIndex;
public bool reversePath;
// what easetype to use for iTweening
public iTween.EaseType easeType = iTween.EaseType.easeInOutExpo;
// how fast we move
public float moveSpeed = 1.5f;
// time to rotate to face destination
public float rotateTime = 0.5f;
// delay to use before any call to iTween
public float iTweenDelay = 0f;
// Use this for initialization
void Start()
{
lastWaypointIndex = waypoints.Count - 1;
targetWaypoint = waypoints[targetWaypointIndex];
}
public void EnemyTurn()
{
float distance = Vector3.Distance(transform.position, targetWaypoint.position);
CheckDistanceToWaypoint(distance);
// move toward the destinationPos using the easeType and moveSpeed variables
iTween.MoveTo(gameObject, iTween.Hash(
"x", targetWaypoint.position.x,
"y", targetWaypoint.position.y,
"z", targetWaypoint.position.z,
"delay", iTweenDelay,
"easetype", easeType,
"speed", moveSpeed
));
}
void CheckDistanceToWaypoint(float currentDistance)
{
if (currentDistance <= minDistance)
{
targetWaypointIndex++;
UpdateTargetWaypoint();
}
}
void UpdateTargetWaypoint()
{
if (targetWaypointIndex > lastWaypointIndex)
{
if (reversePath)
{
waypoints.Reverse();
}
targetWaypointIndex = 1;
}
targetWaypoint = waypoints[targetWaypointIndex];
}
}
Since you asked in the comments for a non-A* solution & I was bored :)
This is a naive DFS pathfinder with very simple pruning.
It is brute force and very wasteful, both memory- and CPU-wise.
For a game similar to Hitman Go / Lara Croft Go, I would use this code and not a navmesh / A*.
For an RTS / FPS or any AI-intensive game, I would definitely not use this solution.
using System;
using System.Collections.Generic;
public class GridPathfinder
{
// Not thread safe
private int? _currentShortestPath;
/// <summary>
/// Finds shortest path from cell A to B
/// </summary>
/// <returns>Shortest found path; null if found no path.</returns>
public IList<Node> GetShortestPath(Node a, Node b)
{
_currentShortestPath = null;
return GetShortestPathInternal(a, b, new List<Node>());
}
private IList<Node> GetShortestPathInternal(Node @from, Node to, List<Node> currentPath)
{
// Sanity
if (currentPath.Contains(from))
{
return null;
}
// Prune
if (_currentShortestPath.HasValue && currentPath.Count + 1 >= _currentShortestPath)
{
return null;
}
currentPath.Add(from);
if (from == to)
{
return currentPath;
}
// Check neighbors recursively
IList<Node> foundShortestPath = null;
foreach (var connectedCell in from.ConnectedCells)
{
var cellPath = GetShortestPathInternal(connectedCell, to, new List<Node>(currentPath));
if (cellPath == null || foundShortestPath != null && cellPath.Count >= foundShortestPath.Count)
{
continue;
}
foundShortestPath = cellPath;
}
// Update shortest path for future pruning
if (foundShortestPath != null && (!_currentShortestPath.HasValue || _currentShortestPath > foundShortestPath.Count))
{
_currentShortestPath = foundShortestPath.Count;
}
return foundShortestPath;
}
}
public class Node
{
private readonly HashSet<Node> _connectedCells = new HashSet<Node>();
public IEnumerable<Node> ConnectedCells => _connectedCells;
/// <summary>
/// Add a connection
/// </summary>
/// <param name="toAdd">Node to add</param>
/// <param name="isTwoWay">Should add a connection from target node to this node</param>
public void AddConnection(Node toAdd, bool isTwoWay=true)
{
if (toAdd == null || toAdd == this)
{
throw new Exception("Invalid connection attempted");
}
// Attempt to add
if (!_connectedCells.Add(toAdd))
{
return;
}
if (!isTwoWay) return;
toAdd.AddConnection(this);
}
}
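For reference, a quick usage sketch (the node names are just examples): build a Node for each waypoint once, connect neighbours with AddConnection, then query the pathfinder when the enemy gets alerted:
// Build a tiny 3-waypoint graph: 1 - 2 - 3, plus a direct 1 - 3 connection
var n1 = new Node();
var n2 = new Node();
var n3 = new Node();
n1.AddConnection(n2);
n2.AddConnection(n3);
n1.AddConnection(n3);
var pathfinder = new GridPathfinder();
IList<Node> path = pathfinder.GetShortestPath(n1, n3);
// path is the full route including start and goal, here [n1, n3]; null means no route exists
Debug.Log(path != null ? "Path length: " + path.Count : "No path found");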

AR core loading prefabs dynamically in unity

I have recently started dabbling in Unity with ARCore. I am using the HelloARController.cs that comes with ARCore 1.5, but I want to load a bundle from my server. I have two scripts:
one called LoadAsset.cs, which loads a bundle from a webserver, and another called HelloARController.cs, which is basically the main part of the app; it lets me preset prefabs that will be loaded when the screen is tapped.
What I would like to do, instead of loading one of the "Andy" objects, is pull a prefab from the webserver and use that instead.
So far I have managed to load a prefab from my server at runtime, but placing it is another matter.
namespace GoogleARCore.Examples.HelloAR
{
using System.Collections.Generic;
using GoogleARCore;
using GoogleARCore.Examples.Common;
using UnityEngine;
using System;
using System.Collections;
#if UNITY_EDITOR
// Set up touch input propagation while using Instant Preview in the editor.
using Input = InstantPreviewInput;
#endif
/// <summary>
/// Controls the HelloAR example.
/// </summary>
public class HelloARController : MonoBehaviour
{
public string BundleURL; // --> http://Myserver/public_http/Assets/AssetBundles/cube_prefab (path to the AssetBundle)
public string AssetName; // --> Cube_pref (name of the Asset prefab)
public int version;
/// <summary>
/// The first-person camera being used to render the passthrough camera image (i.e. AR background).
/// </summary>
public Camera FirstPersonCamera;
/// <summary>
/// A prefab for tracking and visualizing detected planes.
/// </summary>
public GameObject DetectedPlanePrefab;
/// <summary>
/// A model to place when a raycast from a user touch hits a plane.
/// </summary>
public GameObject AndyPlanePrefab;
/// <summary>
/// A model to place when a raycast from a user touch hits a feature point.
/// </summary>
public GameObject AndyPointPrefab;
/// <summary>
/// A gameobject parenting UI for displaying the "searching for planes" snackbar.
/// </summary>
public GameObject SearchingForPlaneUI;
/// <summary>
/// The rotation in degrees need to apply to model when the Andy model is placed.
/// </summary>
private const float k_ModelRotation = 180.0f;
/// <summary>
/// A list to hold all planes ARCore is tracking in the current frame. This object is used across
/// the application to avoid per-frame allocations.
/// </summary>
private List<DetectedPlane> m_AllPlanes = new List<DetectedPlane>();
/// <summary>
/// True if the app is in the process of quitting due to an ARCore connection error, otherwise false.
/// </summary>
private bool m_IsQuitting = false;
/// <summary>
/// The Unity Start() method.
/// </summary>
void Start()
{
StartCoroutine(DownloadAndCache());
}
IEnumerator DownloadAndCache()
{
// Load the AssetBundle file from Cache if it exists with the same version or download and store it in the cache
using (WWW www = WWW.LoadFromCacheOrDownload(BundleURL, version))
{
yield return www;
if (www.error != null)
throw new Exception("WWW download had an error:" + www.error);
AssetBundle bundle = www.assetBundle;
GameObject testing = bundle.LoadAsset(AssetName) as GameObject;
// Unload the AssetBundles compressed contents to conserve memory
bundle.Unload(false);
} // memory is freed from the web stream (www.Dispose() gets called implicitly)
}
I've declared all my variables and managed to get the Prefab to load up to this point ^^^^^
public void Update()
{
_UpdateApplicationLifecycle();
// Hide snackbar when currently tracking at least one plane.
Session.GetTrackables<DetectedPlane>(m_AllPlanes);
bool showSearchingUI = true;
for (int i = 0; i < m_AllPlanes.Count; i++)
{
if (m_AllPlanes[i].TrackingState == TrackingState.Tracking)
{
showSearchingUI = false;
break;
}
}
SearchingForPlaneUI.SetActive(showSearchingUI);
// If the player has not touched the screen, we are done with this update.
Touch touch;
if (Input.touchCount < 1 || (touch = Input.GetTouch(0)).phase != TouchPhase.Began)
{
return;
}
// Raycast against the location the player touched to search for planes.
TrackableHit hit;
TrackableHitFlags raycastFilter = TrackableHitFlags.PlaneWithinPolygon |
TrackableHitFlags.FeaturePointWithSurfaceNormal;
if (Frame.Raycast(touch.position.x, touch.position.y, raycastFilter, out hit))
{
// Use hit pose and camera pose to check if hittest is from the
// back of the plane, if it is, no need to create the anchor.
if ((hit.Trackable is DetectedPlane) &&
Vector3.Dot(FirstPersonCamera.transform.position - hit.Pose.position,
hit.Pose.rotation * Vector3.up) < 0)
{
Debug.Log("Hit at back of the current DetectedPlane");
}
else
{
This is where it loads the Andy prefabs; what I would like to do is swap the Andy prefab out for the asset bundle prefab:
// Choose the Andy model for the Trackable that got hit.
GameObject prefab;
if (hit.Trackable is FeaturePoint)
{
prefab = AndyPlanePrefab;
}
else
{
prefab = AndyPointPrefab;
}
// Instantiate Andy model at the hit pose.
var andyObject = Instantiate(prefab, hit.Pose.position, hit.Pose.rotation);
// Compensate for the hitPose rotation facing away from the raycast (i.e. camera).
andyObject.transform.Rotate(0, k_ModelRotation, 0, Space.Self);
// Create an anchor to allow ARCore to track the hitpoint as understanding of the physical
// world evolves.
var anchor = hit.Trackable.CreateAnchor(hit.Pose);
// Make Andy model a child of the anchor.
andyObject.transform.parent = anchor.transform;
}
}
}
/// <summary>
/// Check and update the application lifecycle.
/// </summary>
private void _UpdateApplicationLifecycle()
{
// Exit the app when the 'back' button is pressed.
if (Input.GetKey(KeyCode.Escape))
{
Application.Quit();
}
// Only allow the screen to sleep when not tracking.
if (Session.Status != SessionStatus.Tracking)
{
const int lostTrackingSleepTimeout = 15;
Screen.sleepTimeout = lostTrackingSleepTimeout;
}
else
{
Screen.sleepTimeout = SleepTimeout.NeverSleep;
}
if (m_IsQuitting)
{
return;
}
// Quit if ARCore was unable to connect and give Unity some time for the toast to appear.
if (Session.Status == SessionStatus.ErrorPermissionNotGranted)
{
_ShowAndroidToastMessage("Camera permission is needed to run this application.");
m_IsQuitting = true;
Invoke("_DoQuit", 0.5f);
}
else if (Session.Status.IsError())
{
_ShowAndroidToastMessage("ARCore encountered a problem connecting. Please start the app again.");
m_IsQuitting = true;
Invoke("_DoQuit", 0.5f);
}
}
/// <summary>
/// Actually quit the application.
/// </summary>
private void _DoQuit()
{
Application.Quit();
}
/// <summary>
/// Show an Android toast message.
/// </summary>
/// <param name="message">Message string to show in the toast.</param>
private void _ShowAndroidToastMessage(string message)
{
AndroidJavaClass unityPlayer = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
AndroidJavaObject unityActivity = unityPlayer.GetStatic<AndroidJavaObject>("currentActivity");
if (unityActivity != null)
{
AndroidJavaClass toastClass = new AndroidJavaClass("android.widget.Toast");
unityActivity.Call("runOnUiThread", new AndroidJavaRunnable(() =>
{
AndroidJavaObject toastObject = toastClass.CallStatic<AndroidJavaObject>("makeText", unityActivity,
message, 0);
toastObject.Call("show");
}));
}
}
}
}
My plan is to change the URL dynamically, as that's just a public variable, so that once the app is running I can change the URL and load a different prefab.
Any help or advice is greatly appreciated.
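In case it helps to see the shape of it, here is a rough sketch (untested against your project) of one way to do the swap: keep the loaded prefab in a field instead of a local variable - bundle.Unload(false) keeps already-loaded assets alive - and prefer it over the Andy prefabs once the download has finished:
// Hypothetical field on HelloARController:
private GameObject m_DownloadedPrefab;
// In DownloadAndCache(), store the asset instead of discarding the local variable:
// m_DownloadedPrefab = bundle.LoadAsset<GameObject>(AssetName);
// In Update(), where the Andy prefab is currently chosen:
GameObject prefab;
if (m_DownloadedPrefab != null)
{
    prefab = m_DownloadedPrefab;   // use the prefab from the asset bundle once it is ready
}
else if (hit.Trackable is FeaturePoint)
{
    prefab = AndyPlanePrefab;      // existing fallback from your code
}
else
{
    prefab = AndyPointPrefab;
}
var placedObject = Instantiate(prefab, hit.Pose.position, hit.Pose.rotation);
placedObject.transform.Rotate(0, k_ModelRotation, 0, Space.Self);
var anchor = hit.Trackable.CreateAnchor(hit.Pose);
placedObject.transform.parent = anchor.transform;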

Oculus and Unity: grab object makes avatar fall through the floor?

I just downloaded the basic Oculus sample framework for Unity (I'm on 2018.2) and am trying to combine the TeleportAvatar and AvatarWithGrab scenes - to have a teleporting avatar that is able to grab objects.
All is well - I dragged an object with the OVRGrabbable script (below) into the teleport scene, added OVRGrabbers to the avatar object and hit play. I can successfully pick up the object after teleporting, etc., but for some reason, after interacting with the grabbable object for a while, my character falls through the floor (maybe it teleports below the floor?).
I even added a new floor to be sure there was a box collider to prevent falling; however, this had no effect.
I looked for any overlap between the controls and even tried turning off teleport, but the grabbable object script is what causes the fall. Why is this?
Doing the grabbing motion with the right hand (the teleporting hand) doesn't cause any issues if I'm not near a grab object.
using System;
using UnityEngine;
/// <summary>
/// An object that can be grabbed and thrown by OVRGrabber.
/// </summary>
public class OVRGrabbable : MonoBehaviour
{
[SerializeField]
protected bool m_allowOffhandGrab = true;
[SerializeField]
protected bool m_snapPosition = false;
[SerializeField]
protected bool m_snapOrientation = false;
[SerializeField]
protected Transform m_snapOffset;
[SerializeField]
protected Collider[] m_grabPoints = null;
protected bool m_grabbedKinematic = false;
protected Collider m_grabbedCollider = null;
protected OVRGrabber m_grabbedBy = null;
/// <summary>
/// If true, the object can currently be grabbed.
/// </summary>
public bool allowOffhandGrab
{
get { return m_allowOffhandGrab; }
}
/// <summary>
/// If true, the object is currently grabbed.
/// </summary>
public bool isGrabbed
{
get { return m_grabbedBy != null; }
}
/// <summary>
/// If true, the object's position will snap to match snapOffset when grabbed.
/// </summary>
public bool snapPosition
{
get { return m_snapPosition; }
}
/// <summary>
/// If true, the object's orientation will snap to match snapOffset when grabbed.
/// </summary>
public bool snapOrientation
{
get { return m_snapOrientation; }
}
/// <summary>
/// An offset relative to the OVRGrabber where this object can snap when grabbed.
/// </summary>
public Transform snapOffset
{
get { return m_snapOffset; }
}
/// <summary>
/// Returns the OVRGrabber currently grabbing this object.
/// </summary>
public OVRGrabber grabbedBy
{
get { return m_grabbedBy; }
}
/// <summary>
/// The transform at which this object was grabbed.
/// </summary>
public Transform grabbedTransform
{
get { return m_grabbedCollider.transform; }
}
/// <summary>
/// The Rigidbody of the collider that was used to grab this object.
/// </summary>
public Rigidbody grabbedRigidbody
{
get { return m_grabbedCollider.attachedRigidbody; }
}
/// <summary>
/// The contact point(s) where the object was grabbed.
/// </summary>
public Collider[] grabPoints
{
get { return m_grabPoints; }
}
/// <summary>
/// Notifies the object that it has been grabbed.
/// </summary>
virtual public void GrabBegin(OVRGrabber hand, Collider grabPoint)
{
m_grabbedBy = hand;
m_grabbedCollider = grabPoint;
gameObject.GetComponent<Rigidbody>().isKinematic = true;
}
/// <summary>
/// Notifies the object that it has been released.
/// </summary>
virtual public void GrabEnd(Vector3 linearVelocity, Vector3 angularVelocity)
{
Rigidbody rb = gameObject.GetComponent<Rigidbody>();
rb.isKinematic = m_grabbedKinematic;
rb.velocity = linearVelocity;
rb.angularVelocity = angularVelocity;
m_grabbedBy = null;
m_grabbedCollider = null;
}
void Awake()
{
if (m_grabPoints.Length == 0)
{
// Get the collider from the grabbable
Collider collider = this.GetComponent<Collider>();
if (collider == null)
{
throw new ArgumentException("Grabbables cannot have zero grab points and no collider -- please add a grab point or collider.");
}
// Create a default grab point
m_grabPoints = new Collider[1] { collider };
}
}
protected virtual void Start()
{
m_grabbedKinematic = GetComponent<Rigidbody>().isKinematic;
}
void OnDestroy()
{
if (m_grabbedBy != null)
{
// Notify the hand to release destroyed grabbables
m_grabbedBy.ForceRelease(this);
}
}
}
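Not an answer, but one way to narrow down whether the rig is really being moved below the floor (rather than the camera detaching from it) is a small throwaway diagnostic attached to the rig root - a sketch, with an arbitrary threshold:
using UnityEngine;
// Attach to the OVR rig root; logs the frame on which it starts sinking.
public class LogRigHeight : MonoBehaviour
{
    private float m_lastY;
    void Start()
    {
        m_lastY = transform.position.y;
    }
    void Update()
    {
        float y = transform.position.y;
        float deltaY = y - m_lastY;
        if (deltaY < -0.05f) // arbitrary "sinking" threshold in meters per frame
        {
            Debug.LogWarning("Rig dropped " + deltaY.ToString("F3") + " m this frame, now at y=" + y.ToString("F3"));
        }
        m_lastY = y;
    }
}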

UNet player controller issues

I've got a player controller that should work over UNET. I must not understand something, though, as any remote players joining a game can't control their character.
The hosting local player can control his/her character just fine.
Basically, the way I think this works is that in Update the local player can press keys. Those keypresses issue Commands to the server, where synced bools are set.
In FixedUpdate the server moves the Rigidbody around based on those bools. On the player object I have a NetworkTransform, so any movement the server does should be sent back to the clients.
using UnityEngine;
using UnityEngine.Networking;
using System.Collections;
[RequireComponent(typeof(NetworkIdentity))]
public class PlayerController : NetworkBehaviour {
public GameObject NormalBullet;
public Vector3 size = new Vector3(0.25f, 0.25f, 0.25f);
private float speed = 8;
private float angularSpeed = 35;
private float jumpForce = 10;
private Rigidbody _rigidbody;
private Map _map;
private NHNetworkedPool _pool;
private bool _active = false;
private Vector3 _lastPosition;
[SyncVar]
private bool _moveForward;
[SyncVar]
private bool _moveBackward;
[SyncVar]
private bool _turnLeft;
[SyncVar]
private bool _turnRight;
[SyncVar]
private bool _jump;
[SyncVar]
private bool _isgrounded;
[SyncVar]
private bool _isFireing;
void Awake () {
Messenger.AddListener ("MAP_LOADED", OnMapLoaded);
_rigidbody = gameObject.GetComponent<Rigidbody> ();
_map = GameObject.Find ("Map").GetComponent<Map> ();
Physics.IgnoreLayerCollision(LayerMask.NameToLayer("Players"), LayerMask.NameToLayer("Players"), true);
}
override public void OnStartClient () {
_rigidbody.position = new Vector3 (-100, -100, -100);
if (NetworkServer.active) {
_pool = FindObjectOfType<NHNetworkedPool> ();
}
}
/// <summary>
/// Once the board is built, hookup the camera if this is the local player
/// and set the player as active.
/// </summary>
void OnMapLoaded () {
if (isLocalPlayer) {
// Hook up the camera
PlayerCamera cam = Camera.main.GetComponent<PlayerCamera>();
cam.target = transform;
// Move the player to the it's spawn location
CmdSpawn();
}
// Set the player as active
_active = true;
}
/// <summary>
/// Only and active local player should be able to
/// issue commands for the player
/// </summary>
void Update () {
if (!isLocalPlayer || !_active) {
return;
}
if (Input.GetKeyDown ("up")) {
CmdSetMoveForward (true);
}
if (Input.GetKeyUp ("up")) {
CmdSetMoveForward (false);
}
if (Input.GetKeyDown ("down")) {
CmdSetMoveBackward (true);
}
if (Input.GetKeyUp ("down")) {
CmdSetMoveBackward (false);
}
if (Input.GetKeyDown ("left")) {
CmdSetTurnLeft (true);
}
if (Input.GetKeyUp ("left")) {
CmdSetTurnLeft (false);
}
if (Input.GetKeyDown ("right")) {
CmdSetTurnRight (true);
}
if (Input.GetKeyUp ("right")) {
CmdSetTurnRight (false);
}
if (Input.GetKeyDown (KeyCode.Space)) {
CmdSetJump (true);
}
if (Input.GetKeyUp (KeyCode.Space)) {
CmdSetJump (false);
}
if (Input.GetKeyDown (KeyCode.LeftShift)) {
CmdSetShooting(true);
}
if (Input.GetKeyUp (KeyCode.LeftShift)) {
CmdSetShooting(false);
}
}
/// <summary>
/// Only the server should update the player's location
/// the transform is synced to the clients
/// </summary>
void FixedUpdate () {
if (!isServer) {
return;
}
if (_moveForward) {
float moveAmount = speed * Time.deltaTime;
_rigidbody.MovePosition(_rigidbody.position + _rigidbody.transform.forward * moveAmount);
}
if (_moveBackward) {
float moveAmount = (-speed * 0.6f) * Time.deltaTime;
_rigidbody.MovePosition(_rigidbody.position + _rigidbody.transform.forward * moveAmount);
}
if (_turnLeft) {
Quaternion rotateAmount = Quaternion.Euler(new Vector3(0f, -angularSpeed, 0f) * Time.deltaTime);
_rigidbody.MoveRotation(_rigidbody.rotation * rotateAmount);
}
if (_turnRight) {
Quaternion rotateAmount = Quaternion.Euler(new Vector3(0f, angularSpeed, 0f) * Time.deltaTime);
_rigidbody.MoveRotation(_rigidbody.rotation * rotateAmount);
}
if (_jump && _isgrounded) {
_rigidbody.AddForce(Vector3.up * 250);
}
}
void OnCollisionStay (Collision collision) {
if(collision.gameObject.tag.ToUpper() == "GROUND") {
_isgrounded = true;
}
}
void OnCollisionExit (Collision collision) {
if(collision.gameObject.tag.ToUpper() == "GROUND") {
_isgrounded = false;
}
}
/// <summary>
/// Client -> Server
/// Move the player to a spawn location
/// </summary>
void CmdSpawn() {
_rigidbody.position = _map.GetPlayerSpawn();
_rigidbody.velocity = Vector3.zero;
}
/// <summary>
/// Client -> Server
/// Set the forward move of the player on/off
/// </summary>
[Command]
void CmdSetMoveForward (bool active) {
_moveForward = active;
}
/// <summary>
/// Client -> Server
/// Set the backward of the player on/off
/// </summary>
[Command]
void CmdSetMoveBackward (bool active) {
_moveBackward = active;
}
/// <summary>
/// Client -> Server
/// Set the left turn of the player on/off
/// </summary>
[Command]
void CmdSetTurnLeft (bool active) {
_turnLeft = active;
}
/// <summary>
/// Client -> Server
/// Set the right turn of the player on/off
/// </summary>
[Command]
void CmdSetTurnRight (bool active) {
_turnRight = active;
}
/// <summary>
/// Client -> Server
/// Set the jumping of the player on/off
/// </summary>
[Command]
void CmdSetJump (bool active) {
_jump = active;
}
/// <summary>
/// Client -> Server
/// Set shooting weapon on/off
/// </summary>
[Command]
void CmdSetShooting (bool active) {
_isFireing = true;
}
}
You should not do the movement on the server. Rewrite it so that the movement is calculated and executed on the client.
Then add a NetworkTransform component to the player and it should work.
Only the Fire method has to be a Command. But because I don't know what actually happens when _isFireing = true, I can't tell you exactly what you should write ;)
EDIT: You also need a NetworkIdentity component on the player if you don't have one.
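A rough sketch of what that could look like, reusing the fields from your class (this assumes the NetworkIdentity has Local Player Authority checked so the NetworkTransform syncs from the owning client; it is not a drop-in replacement):
void Update()
{
    if (!isLocalPlayer || !_active)
    {
        return;
    }
    // Only firing still needs to go through the server.
    if (Input.GetKeyDown(KeyCode.LeftShift))
    {
        CmdFire();
    }
}
void FixedUpdate()
{
    if (!isLocalPlayer || !_active)
    {
        return;
    }
    // Movement now runs on the owning client; NetworkTransform syncs it to the server and other clients.
    if (Input.GetKey("up"))
        _rigidbody.MovePosition(_rigidbody.position + transform.forward * speed * Time.fixedDeltaTime);
    if (Input.GetKey("down"))
        _rigidbody.MovePosition(_rigidbody.position + transform.forward * (-speed * 0.6f) * Time.fixedDeltaTime);
    if (Input.GetKey("left"))
        _rigidbody.MoveRotation(_rigidbody.rotation * Quaternion.Euler(0f, -angularSpeed * Time.fixedDeltaTime, 0f));
    if (Input.GetKey("right"))
        _rigidbody.MoveRotation(_rigidbody.rotation * Quaternion.Euler(0f, angularSpeed * Time.fixedDeltaTime, 0f));
}
[Command]
void CmdFire()
{
    // Server-side: instantiate the bullet and NetworkServer.Spawn(...) it here.
}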