Oculus and Unity: grab object makes avatar fall through the floor? - unity3d

I just downloaded the basic Oculus sample framework for Unity (I'm on 2018.2) and am trying to combine the TeleportAvatar and AvatarWithGrab scenes - to have a teleporting avatar able to grab objects.
All is well - I dragged an object with the OVR Grabbable script (below) into the teleport scene, added OVRGrabbers to the avatar object and hit play. I can successfully pick up the object after teleporting, etc., but for some reason, after interacting with the grabbable object for a while, my character falls through the floor (or maybe teleports below it?).
I even added a new floor to be sure there was a box collider to prevent falling; however, this had no effect.
I looked for any overlap between the controls and even tried turning off teleport, but the grabbable object script still causes the fall. Why is this?
Doing the grabbing motion with the right hand (the teleporting hand) doesn't cause any issues if I'm not near a grabbable object.
using System;
using UnityEngine;
/// <summary>
/// An object that can be grabbed and thrown by OVRGrabber.
/// </summary>
public class OVRGrabbable : MonoBehaviour
{
[SerializeField]
protected bool m_allowOffhandGrab = true;
[SerializeField]
protected bool m_snapPosition = false;
[SerializeField]
protected bool m_snapOrientation = false;
[SerializeField]
protected Transform m_snapOffset;
[SerializeField]
protected Collider[] m_grabPoints = null;
protected bool m_grabbedKinematic = false;
protected Collider m_grabbedCollider = null;
protected OVRGrabber m_grabbedBy = null;
/// <summary>
/// If true, the object can currently be grabbed.
/// </summary>
public bool allowOffhandGrab
{
get { return m_allowOffhandGrab; }
}
/// <summary>
/// If true, the object is currently grabbed.
/// </summary>
public bool isGrabbed
{
get { return m_grabbedBy != null; }
}
/// <summary>
/// If true, the object's position will snap to match snapOffset when grabbed.
/// </summary>
public bool snapPosition
{
get { return m_snapPosition; }
}
/// <summary>
/// If true, the object's orientation will snap to match snapOffset when grabbed.
/// </summary>
public bool snapOrientation
{
get { return m_snapOrientation; }
}
/// <summary>
/// An offset relative to the OVRGrabber where this object can snap when grabbed.
/// </summary>
public Transform snapOffset
{
get { return m_snapOffset; }
}
/// <summary>
/// Returns the OVRGrabber currently grabbing this object.
/// </summary>
public OVRGrabber grabbedBy
{
get { return m_grabbedBy; }
}
/// <summary>
/// The transform at which this object was grabbed.
/// </summary>
public Transform grabbedTransform
{
get { return m_grabbedCollider.transform; }
}
/// <summary>
/// The Rigidbody of the collider that was used to grab this object.
/// </summary>
public Rigidbody grabbedRigidbody
{
get { return m_grabbedCollider.attachedRigidbody; }
}
/// <summary>
/// The contact point(s) where the object was grabbed.
/// </summary>
public Collider[] grabPoints
{
get { return m_grabPoints; }
}
/// <summary>
/// Notifies the object that it has been grabbed.
/// </summary>
virtual public void GrabBegin(OVRGrabber hand, Collider grabPoint)
{
m_grabbedBy = hand;
m_grabbedCollider = grabPoint;
gameObject.GetComponent<Rigidbody>().isKinematic = true;
}
/// <summary>
/// Notifies the object that it has been released.
/// </summary>
virtual public void GrabEnd(Vector3 linearVelocity, Vector3 angularVelocity)
{
Rigidbody rb = gameObject.GetComponent<Rigidbody>();
rb.isKinematic = m_grabbedKinematic;
rb.velocity = linearVelocity;
rb.angularVelocity = angularVelocity;
m_grabbedBy = null;
m_grabbedCollider = null;
}
void Awake()
{
if (m_grabPoints.Length == 0)
{
// Get the collider from the grabbable
Collider collider = this.GetComponent<Collider>();
if (collider == null)
{
throw new ArgumentException("Grabbables cannot have zero grab points and no collider -- please add a grab point or collider.");
}
// Create a default grab point
m_grabPoints = new Collider[1] { collider };
}
}
protected virtual void Start()
{
m_grabbedKinematic = GetComponent<Rigidbody>().isKinematic;
}
void OnDestroy()
{
if (m_grabbedBy != null)
{
// Notify the hand to release destroyed grabbables
m_grabbedBy.ForceRelease(this);
}
}
}
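One common culprit worth ruling out (this is an assumption, not something the snippet proves): the grab interaction adds non-trigger colliders on the hands and on the held object, and if those are allowed to collide with the avatar's capsule they can shove the CharacterController through the floor; the teleport arc can also hit the held object. Below is a minimal diagnostic sketch, assuming the avatar is the standard OVRPlayerController prefab with a CharacterController on its root; the helper class name is made up for this sketch.
using UnityEngine;
// Hypothetical helper: attach next to OVRGrabbable on the grabbable object.
// While the object is held, collisions between its colliders and the player's
// CharacterController capsule are ignored, so the held object cannot push the avatar.
public class IgnorePlayerWhileGrabbed : MonoBehaviour
{
    private OVRGrabbable m_grabbable;
    private CharacterController m_playerCapsule;   // assumed to live on the OVRPlayerController root
    private bool m_ignoring;
    void Start()
    {
        m_grabbable = GetComponent<OVRGrabbable>();
        var player = FindObjectOfType<OVRPlayerController>();
        if (player != null)
            m_playerCapsule = player.GetComponent<CharacterController>();
    }
    void Update()
    {
        if (m_grabbable == null || m_playerCapsule == null)
            return;
        bool shouldIgnore = m_grabbable.isGrabbed;
        if (shouldIgnore == m_ignoring)
            return;
        // Toggle collision between every collider on this object and the player capsule.
        foreach (var col in GetComponentsInChildren<Collider>())
            Physics.IgnoreCollision(col, m_playerCapsule, shouldIgnore);
        m_ignoring = shouldIgnore;
    }
}
If the falling stops while collisions are ignored, the physics interaction between the held object and the capsule is the cause; if not, check whether the teleport raycast can hit the grabbable and pick a destination below the floor.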

Related

Azure Spatial Anchors / Unity World Anchors lose position

I am working on a simple unity app testing azure spatial anchors on the HoloLens. I started from this example (https://github.com/Azure/azure-spatial-anchors-samples) and changed it a little bit to create several anchors.
In some test sessions I noticed that the anchored objects suddenly lost their position and were moved about 10 meters or more.
As I understand HoloLens and mixed reality, the camera position is tracked by a kind of visual odometry, or rather SLAM algorithms, so it is normal that the pose of the device drifts over time and the anchors drift with it. But I did not expect such a huge shift.
Furthermore, I expected the anchors to get back into place the moment the features in their neighborhood become visible to the device's camera again. But this is not always the case. Sometimes the anchors return to their original position when the features are visible again, but sometimes this does not correct the wrong positions at all.
This is the code:
using Microsoft.Azure.SpatialAnchors;
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using UnityEngine;
using UnityEngine.XR.WSA;
using UnityEngine.XR.WSA.Input;
using System.Linq;
using System.IO;
using UnityEditor;
public class AzureSpatialAnchorsScript : MonoBehaviour
{
/// <summary>
/// The sphere prefab.
/// </summary>
public GameObject spherePrefab;
/// <summary>
/// Set this string to the Spatial Anchors account id provided in the Spatial Anchors resource.
/// </summary>
protected string SpatialAnchorsAccountId = "xxxxxxxxxxxxxxxx";
/// <summary>
/// Set this string to the Spatial Anchors account key provided in the Spatial Anchors resource.
/// </summary>
protected string SpatialAnchorsAccountKey = "yyyyyyyyyyyyyyyyyyyyyyy";
/// <summary>
/// Our queue of actions that will be executed on the main thread.
/// </summary>
private readonly Queue<Action> dispatchQueue = new Queue<Action>();
/// <summary>
/// Use the recognizer to detect air taps.
/// </summary>
private GestureRecognizer recognizer;
protected CloudSpatialAnchorSession cloudSpatialAnchorSession;
/// <summary>
/// The CloudSpatialAnchor that we either 1) placed and are saving or 2) just located.
/// </summary>
protected CloudSpatialAnchor currentCloudAnchor;
/// <summary>
/// True if we are creating + saving an anchor
/// </summary>
protected bool tapExecuted = false;
/// <summary>
/// The IDs of the CloudSpatialAnchor that were saved. Use it to find the CloudSpatialAnchors
/// </summary>
protected Dictionary<string, GameObject> cloudSpatialAnchorIdsObjects = new Dictionary<string, GameObject> { };
protected IList<string> anchorIds = new List<string>();
/// <summary>
/// The sphere rendered to show the position of the CloudSpatialAnchor.
/// </summary>
protected Material sphereMaterial;
/// <summary>
/// Indicates if we are ready to save an anchor. We can save an anchor once the value reaches 1.
/// </summary>
protected float recommendedForCreate = 0;
private string pathName;
// Start is called before the first frame update
void Start()
{
Application.SetStackTraceLogType(LogType.Log, StackTraceLogType.None);
recognizer = new GestureRecognizer();
recognizer.StartCapturingGestures();
recognizer.SetRecognizableGestures(GestureSettings.Tap);
recognizer.Tapped += HandleTap;
InitializeSession();
string FileName = "ids.txt";
pathName = Path.Combine(Application.persistentDataPath, FileName);
getIds();
if (anchorIds.Count > 0)
{
CreateWatcher(anchorIds.ToArray());
}
}
// Update is called once per frame
void Update()
{
lock (dispatchQueue)
{
if (dispatchQueue.Count > 0)
{
dispatchQueue.Dequeue()();
}
}
}
/// <summary>
/// Queues the specified <see cref="Action"/> on update.
/// </summary>
/// <param name="updateAction">The update action.</param>
protected void QueueOnUpdate(Action updateAction)
{
lock (dispatchQueue)
{
dispatchQueue.Enqueue(updateAction);
}
}
/// <summary>
/// Cleans up objects.
/// </summary>
public void CleanupObjects()
{
if (cloudSpatialAnchorIdsObjects != null)
{
cloudSpatialAnchorIdsObjects = new Dictionary<string, GameObject>();
}
if (sphereMaterial != null)
{
Destroy(sphereMaterial);
sphereMaterial = null;
}
//currentCloudAnchor = null;
}
/// <summary>
/// Initializes a new CloudSpatialAnchorSession.
/// </summary>
void InitializeSession()
{
Debug.Log("ASA Info: Initializing a CloudSpatialAnchorSession.");
if (string.IsNullOrEmpty(SpatialAnchorsAccountId))
{
Debug.LogError("No account id set.");
return;
}
if (string.IsNullOrEmpty(SpatialAnchorsAccountKey))
{
Debug.LogError("No account key set.");
return;
}
cloudSpatialAnchorSession = new CloudSpatialAnchorSession();
cloudSpatialAnchorSession.Configuration.AccountId = SpatialAnchorsAccountId.Trim();
cloudSpatialAnchorSession.Configuration.AccountKey = SpatialAnchorsAccountKey.Trim();
cloudSpatialAnchorSession.LogLevel = SessionLogLevel.All;
cloudSpatialAnchorSession.Error += CloudSpatialAnchorSession_Error;
cloudSpatialAnchorSession.OnLogDebug += CloudSpatialAnchorSession_OnLogDebug;
cloudSpatialAnchorSession.SessionUpdated += CloudSpatialAnchorSession_SessionUpdated;
cloudSpatialAnchorSession.AnchorLocated += CloudSpatialAnchorSession_AnchorLocated;
cloudSpatialAnchorSession.LocateAnchorsCompleted += CloudSpatialAnchorSession_LocateAnchorsCompleted;
cloudSpatialAnchorSession.Start();
Debug.Log("ASA Info: Session was initialized.");
}
void CreateWatcher(string[] cloudSpatialAnchorIds)
{
Debug.Log("ASA Info: We will look for placeded anchors.");
// Create a Watcher to look for the anchor we created.
AnchorLocateCriteria criteria = new AnchorLocateCriteria();
criteria.Identifiers = cloudSpatialAnchorIds;
cloudSpatialAnchorSession.CreateWatcher(criteria);
Debug.Log("ASA Info: Watcher created. Number of active watchers: " + cloudSpatialAnchorSession.GetActiveWatchers().Count);
}
private void CloudSpatialAnchorSession_Error(object sender, SessionErrorEventArgs args)
{
Debug.LogError("ASA Error: " + args.ErrorMessage);
}
private void CloudSpatialAnchorSession_OnLogDebug(object sender, OnLogDebugEventArgs args)
{
Debug.Log("ASA Log: " + args.Message);
System.Diagnostics.Debug.WriteLine("ASA Log: " + args.Message);
}
private void CloudSpatialAnchorSession_SessionUpdated(object sender, SessionUpdatedEventArgs args)
{
Debug.Log("ASA Log: recommendedForCreate: " + args.Status.RecommendedForCreateProgress);
recommendedForCreate = args.Status.RecommendedForCreateProgress;
}
private void CloudSpatialAnchorSession_AnchorLocated(object sender, AnchorLocatedEventArgs args)
{
switch (args.Status)
{
case LocateAnchorStatus.Located:
Debug.Log("ASA Info: Anchor located! Identifier: " + args.Identifier);
QueueOnUpdate(() =>
{
// Create a green sphere.
GameObject spatialAnchorObj = GameObject.Instantiate(spherePrefab, Vector3.zero, Quaternion.identity) as GameObject;
spatialAnchorObj.AddComponent<WorldAnchor>();
sphereMaterial = spatialAnchorObj.GetComponent<MeshRenderer>().material;
sphereMaterial.color = Color.green;
// Get the WorldAnchor from the CloudSpatialAnchor and use it to position the sphere.
spatialAnchorObj.GetComponent<UnityEngine.XR.WSA.WorldAnchor>().SetNativeSpatialAnchorPtr(args.Anchor.LocalAnchor);
cloudSpatialAnchorIdsObjects.Add(args.Anchor.Identifier, spatialAnchorObj);
Debug.Log("Detected Pos: " + spatialAnchorObj.GetComponent<UnityEngine.XR.WSA.WorldAnchor>().transform.position.ToString("F4"));
Debug.Log("Detected Rot: " + spatialAnchorObj.GetComponent<UnityEngine.XR.WSA.WorldAnchor>().transform.rotation.ToString("F4"));
tapExecuted = false;
});
break;
case LocateAnchorStatus.AlreadyTracked:
Debug.Log("ASA Info: Anchor already tracked. Identifier: " + args.Identifier);
break;
case LocateAnchorStatus.NotLocated:
Debug.Log("ASA Info: Anchor not located. Identifier: " + args.Identifier);
break;
case LocateAnchorStatus.NotLocatedAnchorDoesNotExist:
Debug.LogError("ASA Error: Anchor not located does not exist. Identifier: " + args.Identifier);
break;
}
}
private void CloudSpatialAnchorSession_LocateAnchorsCompleted(object sender, LocateAnchorsCompletedEventArgs args)
{
Debug.Log("ASA Info: Locate anchors completed. Watcher identifier: " + args.Watcher.Identifier);
}
/// <summary>
/// Called by GestureRecognizer when a tap is detected.
/// </summary>
/// <param name="eventArgs">The tap.</param>
public void HandleTap(TappedEventArgs eventArgs)
{
if (tapExecuted)
{
return;
}
tapExecuted = true;
Debug.Log("ASA Info: We will create a new anchor.");
//// Clean up any anchors that have been placed.
//CleanupObjects();
// Construct a Ray using forward direction of the HoloLens.
Ray GazeRay = new Ray(eventArgs.headPose.position, eventArgs.headPose.forward);
// Raycast to get the hit point in the real world.
RaycastHit hitInfo;
Physics.Raycast(GazeRay, out hitInfo, float.MaxValue);
this.CreateAndSaveSphere(hitInfo.point);
}
/// <summary>
/// Creates a sphere at the hit point, and then saves a CloudSpatialAnchor there.
/// </summary>
/// <param name="hitPoint">The hit point.</param>
protected virtual void CreateAndSaveSphere(Vector3 hitPoint)
{
// Create a white sphere.
GameObject spatialAnchorObj = GameObject.Instantiate(spherePrefab, hitPoint, Quaternion.identity) as GameObject;
spatialAnchorObj.AddComponent<WorldAnchor>();
sphereMaterial = spatialAnchorObj.GetComponent<MeshRenderer>().material;
sphereMaterial.color = Color.white;
Debug.Log("ASA Info: Created a local anchor.");
// Create the CloudSpatialAnchor.
currentCloudAnchor = new CloudSpatialAnchor();
// Set the LocalAnchor property of the CloudSpatialAnchor to the WorldAnchor component of our white sphere.
WorldAnchor worldAnchor = spatialAnchorObj.GetComponent<WorldAnchor>();
if (worldAnchor == null)
{
throw new Exception("ASA Error: Couldn't get the local anchor pointer.");
}
// Save the CloudSpatialAnchor to the cloud.
currentCloudAnchor.LocalAnchor = worldAnchor.GetNativeSpatialAnchorPtr();
//cloudAnchor.AppProperties[@"x"] = @"frame";
//cloudAnchor.AppProperties[@"label"] = @"my latest picture";
Task.Run(async () =>
{
// Wait for enough data about the environment.
while (recommendedForCreate < 1.0F)
{
await Task.Delay(330);
}
bool success = false;
try
{
QueueOnUpdate(() =>
{
// We are about to save the CloudSpatialAnchor to the Azure Spatial Anchors, turn it yellow.
sphereMaterial.color = Color.yellow;
});
await cloudSpatialAnchorSession.CreateAnchorAsync(currentCloudAnchor);
success = currentCloudAnchor != null;
if (success)
{
// Record the identifier to locate.
string cloudAnchorId = currentCloudAnchor.Identifier;
QueueOnUpdate(() =>
{
// Turn the sphere blue.
sphereMaterial.color = Color.blue;
});
Debug.Log("ASA Info: Saved anchor to Azure Spatial Anchors! Identifier: " + cloudAnchorId);
//Debug.Log("Created " + cloudAnchorId + " at pos: " + worldAnchor.transform.position);
//Debug.Log("Created " + cloudAnchorId + "at rot: " + worldAnchor.transform.rotation);
anchorIds.Add(cloudAnchorId);
cloudSpatialAnchorIdsObjects.Add(cloudAnchorId, spatialAnchorObj);
WriteIds();
}
else
{
sphereMaterial.color = Color.red;
Debug.LogError("ASA Error: Failed to save, but no exception was thrown.");
}
}
catch (Exception ex)
{
QueueOnUpdate(() =>
{
sphereMaterial.color = Color.red;
});
Debug.LogError("ASA Error: " + ex.Message);
}
// Allow the user to tap again to clear state and look for the anchor.
tapExecuted = false;
});
}
void WriteIds()
{
try
{
string fileContent = ""
//= ReadString();
;
foreach (string id in anchorIds)
{
fileContent += id + Environment.NewLine;
}
using (StreamWriter writer = new StreamWriter(new FileStream(pathName, FileMode.OpenOrCreate, FileAccess.Write)))
{
writer.Write(fileContent);
}
}
catch (Exception e)
{
Debug.LogError(e);
}
}
void getIds()
{
try
{
StreamReader reader = new StreamReader(pathName);
string line;
while ((line = reader.ReadLine()) != null)
{
anchorIds.Add(line);
}
reader.Close();
}
catch (FileNotFoundException e)
{
Debug.LogWarning("No AnchorId file found");
}
}
}
Is something wrong with the way the anchors are created or is this normal behavior?
It is not surprising that the anchors sometimes lose their position, and it can happen that some of the anchors are relocalized after tracking is recovered but not all of them. It could be helpful to add a script to the object you create for each anchor that shows the tracking state of the anchor attached to it. Here is a sample:
using System;
using UnityEngine;
using UnityEngine.XR.WSA;
public class ShowTrackingState : MonoBehaviour
{
WorldAnchor worldAnchor = null;
Material renderMaterial = null;
bool isTracking = false;
void Start()
{
renderMaterial = gameObject.GetComponent<Renderer>().material;
}
void OnDestroy()
{
if (renderMaterial != null)
{
Destroy(renderMaterial);
}
}
void Update()
{
if (worldAnchor == null)
{
worldAnchor = gameObject.GetComponent<WorldAnchor>();
}
if (worldAnchor == null)
{
isTracking = false;
}
else
{
isTracking = worldAnchor.isLocated;
}
renderMaterial.color = isTracking ? Color.green : Color.red; // green while the anchor is located, red otherwise
}
}
Also, a gentle reminder: it is a common mistake to forget to hide one's account id and key when posting questions on forums. It is not secure, so you might want to remove that part from the code snippet in your question :-)
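As a possible alternative to polling in Update, Unity's WorldAnchor also exposes an OnTrackingChanged event. Here is a minimal sketch of the same idea, assuming the WorldAnchor component has already been added to the object before this script's Start runs:
using UnityEngine;
using UnityEngine.XR.WSA;
public class ShowTrackingStateEvents : MonoBehaviour
{
    Material renderMaterial;
    void Start()
    {
        renderMaterial = GetComponent<Renderer>().material;
        var worldAnchor = GetComponent<WorldAnchor>();
        if (worldAnchor != null)
        {
            // Color the object once now, then again whenever the located state changes.
            UpdateColor(worldAnchor.isLocated);
            worldAnchor.OnTrackingChanged += (anchor, located) => UpdateColor(located);
        }
    }
    void UpdateColor(bool located)
    {
        renderMaterial.color = located ? Color.green : Color.red;
    }
    void OnDestroy()
    {
        if (renderMaterial != null)
        {
            Destroy(renderMaterial);
        }
    }
}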

AR core loading prefabs dynamically in unity

I have recently started dabbling in Unity with ARCore. I am using the HelloARController.cs that comes with ARCore 1.5, but I want to load a bundle from my server. I have two scripts:
one called LoadAsset.cs, which loads a bundle from a web server, and another called HelloARController.cs, which is basically the main part of the app; it lets me preset prefabs that will be loaded when the screen is tapped.
Instead of loading one of the "Andy" objects, I would like it to pull a prefab from the web server and use that instead.
So far I have managed to load a prefab from my server at runtime, but placing it is another matter.
namespace GoogleARCore.Examples.HelloAR
{
using System.Collections.Generic;
using GoogleARCore;
using GoogleARCore.Examples.Common;
using UnityEngine;
using System;
using System.Collections;
#if UNITY_EDITOR
// Set up touch input propagation while using Instant Preview in the editor.
using Input = InstantPreviewInput;
#endif
/// <summary>
/// Controls the HelloAR example.
/// </summary>
public class HelloARController : MonoBehaviour
{
public string BundleURL; // --> http://Myserver/public_http/Assets/AssetBundles/cube_prefab (path to the AssetBundle)
public string AssetName; // --> Cube_pref (name of the Asset prefab)
public int version;
/// <summary>
/// The first-person camera being used to render the passthrough camera image (i.e. AR background).
/// </summary>
public Camera FirstPersonCamera;
/// <summary>
/// A prefab for tracking and visualizing detected planes.
/// </summary>
public GameObject DetectedPlanePrefab;
/// <summary>
/// A model to place when a raycast from a user touch hits a plane.
/// </summary>
public GameObject AndyPlanePrefab;
/// <summary>
/// A model to place when a raycast from a user touch hits a feature point.
/// </summary>
public GameObject AndyPointPrefab;
/// <summary>
/// A gameobject parenting UI for displaying the "searching for planes" snackbar.
/// </summary>
public GameObject SearchingForPlaneUI;
/// <summary>
/// The rotation in degrees need to apply to model when the Andy model is placed.
/// </summary>
private const float k_ModelRotation = 180.0f;
/// <summary>
/// A list to hold all planes ARCore is tracking in the current frame. This object is used across
/// the application to avoid per-frame allocations.
/// </summary>
private List<DetectedPlane> m_AllPlanes = new List<DetectedPlane>();
/// <summary>
/// True if the app is in the process of quitting due to an ARCore connection error, otherwise false.
/// </summary>
private bool m_IsQuitting = false;
/// <summary>
/// The Unity Start() method.
/// </summary>
void Start()
{
StartCoroutine(DownloadAndCache());
}
IEnumerator DownloadAndCache()
{
// Load the AssetBundle file from Cache if it exists with the same version or download and store it in the cache
using (WWW www = WWW.LoadFromCacheOrDownload(BundleURL, version))
{
yield return www;
if (www.error != null)
throw new Exception("WWW download had an error:" + www.error);
AssetBundle bundle = www.assetBundle;
GameObject testing = bundle.LoadAsset(AssetName) as GameObject;
// Unload the AssetBundles compressed contents to conserve memory
bundle.Unload(false);
} // memory is freed from the web stream (www.Dispose() gets called implicitly)
}
I've declared all my variables and managed to get the Prefab to load up to this point ^^^^^
public void Update()
{
_UpdateApplicationLifecycle();
// Hide snackbar when currently tracking at least one plane.
Session.GetTrackables<DetectedPlane>(m_AllPlanes);
bool showSearchingUI = true;
for (int i = 0; i < m_AllPlanes.Count; i++)
{
if (m_AllPlanes[i].TrackingState == TrackingState.Tracking)
{
showSearchingUI = false;
break;
}
}
SearchingForPlaneUI.SetActive(showSearchingUI);
// If the player has not touched the screen, we are done with this update.
Touch touch;
if (Input.touchCount < 1 || (touch = Input.GetTouch(0)).phase != TouchPhase.Began)
{
return;
}
// Raycast against the location the player touched to search for planes.
TrackableHit hit;
TrackableHitFlags raycastFilter = TrackableHitFlags.PlaneWithinPolygon |
TrackableHitFlags.FeaturePointWithSurfaceNormal;
if (Frame.Raycast(touch.position.x, touch.position.y, raycastFilter, out hit))
{
// Use hit pose and camera pose to check if hittest is from the
// back of the plane, if it is, no need to create the anchor.
if ((hit.Trackable is DetectedPlane) &&
Vector3.Dot(FirstPersonCamera.transform.position - hit.Pose.position,
hit.Pose.rotation * Vector3.up) < 0)
{
Debug.Log("Hit at back of the current DetectedPlane");
}
else
{
This is where it loads the Andy prefabs; what I would like to do is swap the Andy prefab out for the asset bundle prefab:
// Choose the Andy model for the Trackable that got hit.
GameObject prefab;
if (hit.Trackable is FeaturePoint)
{
prefab = AndyPlanePrefab;
}
else
{
prefab = AndyPointPrefab;
}
// Instantiate Andy model at the hit pose.
var andyObject = Instantiate(prefab, hit.Pose.position, hit.Pose.rotation);
// Compensate for the hitPose rotation facing away from the raycast (i.e. camera).
andyObject.transform.Rotate(0, k_ModelRotation, 0, Space.Self);
// Create an anchor to allow ARCore to track the hitpoint as understanding of the physical
// world evolves.
var anchor = hit.Trackable.CreateAnchor(hit.Pose);
// Make Andy model a child of the anchor.
andyObject.transform.parent = anchor.transform;
}
}
}
/// <summary>
/// Check and update the application lifecycle.
/// </summary>
private void _UpdateApplicationLifecycle()
{
// Exit the app when the 'back' button is pressed.
if (Input.GetKey(KeyCode.Escape))
{
Application.Quit();
}
// Only allow the screen to sleep when not tracking.
if (Session.Status != SessionStatus.Tracking)
{
const int lostTrackingSleepTimeout = 15;
Screen.sleepTimeout = lostTrackingSleepTimeout;
}
else
{
Screen.sleepTimeout = SleepTimeout.NeverSleep;
}
if (m_IsQuitting)
{
return;
}
// Quit if ARCore was unable to connect and give Unity some time for the toast to appear.
if (Session.Status == SessionStatus.ErrorPermissionNotGranted)
{
_ShowAndroidToastMessage("Camera permission is needed to run this application.");
m_IsQuitting = true;
Invoke("_DoQuit", 0.5f);
}
else if (Session.Status.IsError())
{
_ShowAndroidToastMessage("ARCore encountered a problem connecting. Please start the app again.");
m_IsQuitting = true;
Invoke("_DoQuit", 0.5f);
}
}
/// <summary>
/// Actually quit the application.
/// </summary>
private void _DoQuit()
{
Application.Quit();
}
/// <summary>
/// Show an Android toast message.
/// </summary>
/// <param name="message">Message string to show in the toast.</param>
private void _ShowAndroidToastMessage(string message)
{
AndroidJavaClass unityPlayer = new AndroidJavaClass("com.unity3d.player.UnityPlayer");
AndroidJavaObject unityActivity = unityPlayer.GetStatic<AndroidJavaObject>("currentActivity");
if (unityActivity != null)
{
AndroidJavaClass toastClass = new AndroidJavaClass("android.widget.Toast");
unityActivity.Call("runOnUiThread", new AndroidJavaRunnable(() =>
{
AndroidJavaObject toastObject = toastClass.CallStatic<AndroidJavaObject>("makeText", unityActivity,
message, 0);
toastObject.Call("show");
}));
}
}
}
}
My plan is to change the URL dynamically (it's just a public variable) so that once the app is running I can change the URL and load a different prefab.
Any help or advice is greatly appreciated.
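Not a definitive answer, but a minimal sketch of one way to wire the downloaded prefab into the placement code, assuming the bundle finishes downloading before the first tap. The field name m_bundlePrefab is made up for this sketch; the idea is simply to keep the loaded asset in a field instead of a local variable and fall back to the Andy prefabs while it is still null:
// Hypothetical additions to the HelloARController class from the question.
private GameObject m_bundlePrefab;   // holds the prefab loaded from the AssetBundle
IEnumerator DownloadAndCache()
{
    using (WWW www = WWW.LoadFromCacheOrDownload(BundleURL, version))
    {
        yield return www;
        if (www.error != null)
            throw new Exception("WWW download had an error:" + www.error);
        AssetBundle bundle = www.assetBundle;
        // Keep the reference in a field so Update() can instantiate it later.
        m_bundlePrefab = bundle.LoadAsset<GameObject>(AssetName);
        bundle.Unload(false);
    }
}
// In Update(), where the Andy prefab is normally chosen:
GameObject prefab;
if (m_bundlePrefab != null)
{
    prefab = m_bundlePrefab;        // downloaded prefab, once it is ready
}
else if (hit.Trackable is FeaturePoint)
{
    prefab = AndyPlanePrefab;       // same fallbacks as the original code
}
else
{
    prefab = AndyPointPrefab;
}
var placedObject = Instantiate(prefab, hit.Pose.position, hit.Pose.rotation);
placedObject.transform.Rotate(0, k_ModelRotation, 0, Space.Self);
var anchor = hit.Trackable.CreateAnchor(hit.Pose);
placedObject.transform.parent = anchor.transform;
If you later change BundleURL at runtime, you would also need to re-run DownloadAndCache (and probably unload the previous bundle) before the new prefab becomes available.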

Movement in VR with Unity for Oculus Go

I am trying to build a VR app for Oculus Go. I want to move the OVRPlayerController by pointing at a position on the ground and clicking to set a target position, so that the character then moves towards it. The player controller does not move towards the pointed position, even though the target object is created at that position. I have written this script for it and attached it to the OVRPlayerController.
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class ClickToMove : MonoBehaviour
{
private Vector3 targetPos; //This Vector3 will store the position where we click to move.
private bool Moving = false; //This bool keeps track of whether we are in the process of moving or not.
private GameObject targetInstance;
//The variables we want to customize. Added info headers to these for the Unity Editor.
[Header("Our Go controller object")]
public GameObject goController;
[Header("Movement Speed")]
public float speed = 1;
[Header("Stop When This Far Away From Target")]
public float haltDistance = 0;
[Header("Optional Target Object")]
public GameObject targetObj;
void Update()
{
MoveToTarget(); //Here we simply run our MoveToTarget method in the Update method.
//That way we don't clutter up the Update method with too much code.
}
void MoveToTarget() //Here we do the cluttering instead.
{
var ray = new Ray(goController.transform.position, goController.transform.forward); //Create a ray going from the goController position and in the Forward direction of the goController.
RaycastHit hitInfo; //Store info about what the ray hits.
Physics.Raycast(ray, out hitInfo, 100);
if (OVRInput.GetUp(OVRInput.Button.PrimaryIndexTrigger)) //If we release the trigger..
{
targetPos = hitInfo.point; //Make our targetPos assume the positional value of the hit point.
if (targetObj) //If we have specified a Target Object to mark where we click.
//If we didn't, then we don't want to try to instantiate it.
{
if (targetInstance) //If there is already a Target Object in the scene.
{
Destroy(targetInstance); //Destroy it.
}
targetInstance = Instantiate(targetObj, targetPos, transform.rotation); //Create our Target object at the position we clicked.
}
Moving = true; //And finally we set Moving to True.
}
if (Moving == true) //Since Moving is now true
{
transform.position = Vector3.MoveTowards(transform.position, new Vector3(targetPos.x, transform.position.y, targetPos.z), speed * Time.deltaTime); //Transform our x and z position to move towards the targetPos.
//Note that our y position is kept at default transform position since we only want to move along the ground plane.
}
if (Vector3.Distance(transform.position, targetPos) <= haltDistance + 1) //Check proximity to targetPos. Mainly useful to keep your player from setting a target position right next to say a building and then end up clipping through half of it.
{
if (targetInstance) //If we created a Target Object..
{
Destroy(targetInstance); //Then we want to destroy it when we reach it.
}
Moving = false; //Since we have now arrived at our target destination.
}
}
}
I created a cube prefab for the Target Object and dragged the TrackedRemote prefab (which contains the Go controller model and script) onto the Go Controller slot.
Then I created a ray from my controller using the script below and attached it to OVRCameraRig.
using UnityEngine;
using UnityEngine.Events;
public class VRRaycaster : MonoBehaviour
{
[System.Serializable]
public class Callback : UnityEvent<Ray, RaycastHit> { }
public Transform leftHandAnchor = null;
public Transform rightHandAnchor = null;
public Transform centerEyeAnchor = null;
public LineRenderer lineRenderer = null;
public float maxRayDistance = 500.0f;
public LayerMask excludeLayers;
public VRRaycaster.Callback raycastHitCallback;
void Awake()
{
if (leftHandAnchor == null)
{
Debug.LogWarning("Assign LeftHandAnchor in the inspector!");
GameObject left = GameObject.Find("LeftHandAnchor");
if (left != null)
{
leftHandAnchor = left.transform;
}
}
if (rightHandAnchor == null)
{
Debug.LogWarning("Assign RightHandAnchor in the inspector!");
GameObject right = GameObject.Find("RightHandAnchor");
if (right != null)
{
rightHandAnchor = right.transform;
}
}
if (centerEyeAnchor == null)
{
Debug.LogWarning("Assign CenterEyeAnchor in the inspector!");
GameObject center = GameObject.Find("CenterEyeAnchor");
if (center != null)
{
centerEyeAnchor = center.transform;
}
}
if (lineRenderer == null)
{
Debug.LogWarning("Assign a line renderer in the inspector!");
lineRenderer = gameObject.AddComponent<LineRenderer>();
lineRenderer.shadowCastingMode = UnityEngine.Rendering.ShadowCastingMode.Off;
lineRenderer.receiveShadows = false;
lineRenderer.widthMultiplier = 0.02f;
}
}
Transform Pointer
{
get
{
OVRInput.Controller controller = OVRInput.GetConnectedControllers();
if ((controller & OVRInput.Controller.LTrackedRemote) != OVRInput.Controller.None)
{
return leftHandAnchor;
}
else if ((controller & OVRInput.Controller.RTrackedRemote) != OVRInput.Controller.None)
{
return rightHandAnchor;
}
// If no controllers are connected, we use a ray from the view camera.
// This looks super awkward! Should probably fall back to a simple reticle!
return centerEyeAnchor;
}
}
void Update()
{
Transform pointer = Pointer;
if (pointer == null)
{
return;
}
Ray laserPointer = new Ray(pointer.position, pointer.forward);
if (lineRenderer != null)
{
lineRenderer.SetPosition(0, laserPointer.origin);
lineRenderer.SetPosition(1, laserPointer.origin + laserPointer.direction * maxRayDistance);
}
RaycastHit hit;
if (Physics.Raycast(laserPointer, out hit, maxRayDistance, ~excludeLayers))
{
if (lineRenderer != null)
{
lineRenderer.SetPosition(1, hit.point);
}
if (raycastHitCallback != null)
{
raycastHitCallback.Invoke(laserPointer, hit);
}
}
}
}
The problem is that when I press the trigger, the GameObject is created at that point on the floor but the OVRPlayerController does not move towards it.
I have also attached my OVRPlayerController.cs script below:
using System;
using UnityEngine;
/// <summary>
/// Controls the player's movement in virtual reality.
/// </summary>
[RequireComponent(typeof(CharacterController))]
public class OVRPlayerController : MonoBehaviour
{
/// <summary>
/// The rate acceleration during movement.
/// </summary>
public float Acceleration = 0.1f;
/// <summary>
/// The rate of damping on movement.
/// </summary>
public float Damping = 0.3f;
/// <summary>
/// The rate of additional damping when moving sideways or backwards.
/// </summary>
public float BackAndSideDampen = 0.5f;
/// <summary>
/// The force applied to the character when jumping.
/// </summary>
public float JumpForce = 0.3f;
/// <summary>
/// The rate of rotation when using a gamepad.
/// </summary>
public float RotationAmount = 1.5f;
/// <summary>
/// The rate of rotation when using the keyboard.
/// </summary>
public float RotationRatchet = 45.0f;
/// <summary>
/// The player will rotate in fixed steps if Snap Rotation is enabled.
/// </summary>
[Tooltip("The player will rotate in fixed steps if Snap Rotation is enabled.")]
public bool SnapRotation = true;
/// <summary>
/// How many fixed speeds to use with linear movement? 0=linear control
/// </summary>
[Tooltip("How many fixed speeds to use with linear movement? 0=linear control")]
public int FixedSpeedSteps;
/// <summary>
/// If true, reset the initial yaw of the player controller when the Hmd pose is recentered.
/// </summary>
public bool HmdResetsY = true;
/// <summary>
/// If true, tracking data from a child OVRCameraRig will update the direction of movement.
/// </summary>
public bool HmdRotatesY = true;
/// <summary>
/// Modifies the strength of gravity.
/// </summary>
public float GravityModifier = 0.379f;
/// <summary>
/// If true, each OVRPlayerController will use the player's physical height.
/// </summary>
public bool useProfileData = true;
/// <summary>
/// The CameraHeight is the actual height of the HMD and can be used to adjust the height of the character controller, which will affect the
/// ability of the character to move into areas with a low ceiling.
/// </summary>
[NonSerialized]
public float CameraHeight;
/// <summary>
/// This event is raised after the character controller is moved. This is used by the OVRAvatarLocomotion script to keep the avatar transform synchronized
/// with the OVRPlayerController.
/// </summary>
public event Action<Transform> TransformUpdated;
/// <summary>
/// This bool is set to true whenever the player controller has been teleported. It is reset after every frame. Some systems, such as
/// CharacterCameraConstraint, test this boolean in order to disable logic that moves the character controller immediately
/// following the teleport.
/// </summary>
[NonSerialized] // This doesn't need to be visible in the inspector.
public bool Teleported;
/// <summary>
/// This event is raised immediately after the camera transform has been updated, but before movement is updated.
/// </summary>
public event Action CameraUpdated;
/// <summary>
/// This event is raised right before the character controller is actually moved in order to provide other systems the opportunity to
/// move the character controller in response to things other than user input, such as movement of the HMD. See CharacterCameraConstraint.cs
/// for an example of this.
/// </summary>
public event Action PreCharacterMove;
/// <summary>
/// When true, user input will be applied to linear movement. Set this to false whenever the player controller needs to ignore input for
/// linear movement.
/// </summary>
public bool EnableLinearMovement = true;
/// <summary>
/// When true, user input will be applied to rotation. Set this to false whenever the player controller needs to ignore input for rotation.
/// </summary>
public bool EnableRotation = true;
protected CharacterController Controller = null;
protected OVRCameraRig CameraRig = null;
private float MoveScale = 1.0f;
private Vector3 MoveThrottle = Vector3.zero;
private float FallSpeed = 0.0f;
private OVRPose? InitialPose;
public float InitialYRotation { get; private set; }
private float MoveScaleMultiplier = 1.0f;
private float RotationScaleMultiplier = 1.0f;
private bool SkipMouseRotation = true; // It is rare to want to use mouse movement in VR, so ignore the mouse by default.
private bool HaltUpdateMovement = false;
private bool prevHatLeft = false;
private bool prevHatRight = false;
private float SimulationRate = 60f;
private float buttonRotation = 0f;
private bool ReadyToSnapTurn; // Set to true when a snap turn has occurred, code requires one frame of centered thumbstick to enable another snap turn.
void Start()
{
// Add eye-depth as a camera offset from the player controller
var p = CameraRig.transform.localPosition;
p.z = OVRManager.profile.eyeDepth;
CameraRig.transform.localPosition = p;
}
void Awake()
{
Controller = gameObject.GetComponent<CharacterController>();
if (Controller == null)
Debug.LogWarning("OVRPlayerController: No CharacterController attached.");
// We use OVRCameraRig to set rotations to cameras,
// and to be influenced by rotation
OVRCameraRig[] CameraRigs = gameObject.GetComponentsInChildren<OVRCameraRig>();
if (CameraRigs.Length == 0)
Debug.LogWarning("OVRPlayerController: No OVRCameraRig attached.");
else if (CameraRigs.Length > 1)
Debug.LogWarning("OVRPlayerController: More then 1 OVRCameraRig attached.");
else
CameraRig = CameraRigs[0];
InitialYRotation = transform.rotation.eulerAngles.y;
}
void OnEnable()
{
OVRManager.display.RecenteredPose += ResetOrientation;
if (CameraRig != null)
{
CameraRig.UpdatedAnchors += UpdateTransform;
}
}
void OnDisable()
{
OVRManager.display.RecenteredPose -= ResetOrientation;
if (CameraRig != null)
{
CameraRig.UpdatedAnchors -= UpdateTransform;
}
}
void Update()
{
//Use keys to ratchet rotation
if (Input.GetKeyDown(KeyCode.Q))
buttonRotation -= RotationRatchet;
if (Input.GetKeyDown(KeyCode.E))
buttonRotation += RotationRatchet;
}
protected virtual void UpdateController()
{
if (useProfileData)
{
if (InitialPose == null)
{
// Save the initial pose so it can be recovered if useProfileData
// is turned off later.
InitialPose = new OVRPose()
{
position = CameraRig.transform.localPosition,
orientation = CameraRig.transform.localRotation
};
}
var p = CameraRig.transform.localPosition;
if (OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.EyeLevel)
{
p.y = OVRManager.profile.eyeHeight - (0.5f * Controller.height) + Controller.center.y;
}
else if (OVRManager.instance.trackingOriginType == OVRManager.TrackingOrigin.FloorLevel)
{
p.y = -(0.5f * Controller.height) + Controller.center.y;
}
CameraRig.transform.localPosition = p;
}
else if (InitialPose != null)
{
// Return to the initial pose if useProfileData was turned off at runtime
CameraRig.transform.localPosition = InitialPose.Value.position;
CameraRig.transform.localRotation = InitialPose.Value.orientation;
InitialPose = null;
}
CameraHeight = CameraRig.centerEyeAnchor.localPosition.y;
if (CameraUpdated != null)
{
CameraUpdated();
}
UpdateMovement();
Vector3 moveDirection = Vector3.zero;
float motorDamp = (1.0f + (Damping * SimulationRate * Time.deltaTime));
MoveThrottle.x /= motorDamp;
MoveThrottle.y = (MoveThrottle.y > 0.0f) ? (MoveThrottle.y / motorDamp) : MoveThrottle.y;
MoveThrottle.z /= motorDamp;
moveDirection += MoveThrottle * SimulationRate * Time.deltaTime;
// Gravity
if (Controller.isGrounded && FallSpeed <= 0)
FallSpeed = ((Physics.gravity.y * (GravityModifier * 0.002f)));
else
FallSpeed += ((Physics.gravity.y * (GravityModifier * 0.002f)) * SimulationRate * Time.deltaTime);
moveDirection.y += FallSpeed * SimulationRate * Time.deltaTime;
if (Controller.isGrounded && MoveThrottle.y <= transform.lossyScale.y * 0.001f)
{
// Offset correction for uneven ground
float bumpUpOffset = Mathf.Max(Controller.stepOffset, new Vector3(moveDirection.x, 0, moveDirection.z).magnitude);
moveDirection -= bumpUpOffset * Vector3.up;
}
if (PreCharacterMove != null)
{
PreCharacterMove();
Teleported = false;
}
Vector3 predictedXZ = Vector3.Scale((Controller.transform.localPosition + moveDirection), new Vector3(1, 0, 1));
// Move controller
Controller.Move(moveDirection);
Vector3 actualXZ = Vector3.Scale(Controller.transform.localPosition, new Vector3(1, 0, 1));
if (predictedXZ != actualXZ)
MoveThrottle += (actualXZ - predictedXZ) / (SimulationRate * Time.deltaTime);
}
public virtual void UpdateMovement()
{
if (HaltUpdateMovement)
return;
if (EnableLinearMovement)
{
bool moveForward = Input.GetKey(KeyCode.W) || Input.GetKey(KeyCode.UpArrow);
bool moveLeft = Input.GetKey(KeyCode.A) || Input.GetKey(KeyCode.LeftArrow);
bool moveRight = Input.GetKey(KeyCode.D) || Input.GetKey(KeyCode.RightArrow);
bool moveBack = Input.GetKey(KeyCode.S) || Input.GetKey(KeyCode.DownArrow);
bool dpad_move = false;
if (OVRInput.Get(OVRInput.Button.DpadUp))
{
moveForward = true;
dpad_move = true;
}
if (OVRInput.Get(OVRInput.Button.DpadDown))
{
moveBack = true;
dpad_move = true;
}
MoveScale = 1.0f;
if ((moveForward && moveLeft) || (moveForward && moveRight) ||
(moveBack && moveLeft) || (moveBack && moveRight))
MoveScale = 0.70710678f;
// No positional movement if we are in the air
if (!Controller.isGrounded)
MoveScale = 0.0f;
MoveScale *= SimulationRate * Time.deltaTime;
// Compute this for key movement
float moveInfluence = Acceleration * 0.1f * MoveScale * MoveScaleMultiplier;
// Run!
if (dpad_move || Input.GetKey(KeyCode.LeftShift) || Input.GetKey(KeyCode.RightShift))
moveInfluence *= 2.0f;
Quaternion ort = transform.rotation;
Vector3 ortEuler = ort.eulerAngles;
ortEuler.z = ortEuler.x = 0f;
ort = Quaternion.Euler(ortEuler);
if (moveForward)
MoveThrottle += ort * (transform.lossyScale.z * moveInfluence * Vector3.forward);
if (moveBack)
MoveThrottle += ort * (transform.lossyScale.z * moveInfluence * BackAndSideDampen * Vector3.back);
if (moveLeft)
MoveThrottle += ort * (transform.lossyScale.x * moveInfluence * BackAndSideDampen * Vector3.left);
if (moveRight)
MoveThrottle += ort * (transform.lossyScale.x * moveInfluence * BackAndSideDampen * Vector3.right);
moveInfluence = Acceleration * 0.1f * MoveScale * MoveScaleMultiplier;
#if !UNITY_ANDROID // LeftTrigger not avail on Android game pad
moveInfluence *= 1.0f + OVRInput.Get(OVRInput.Axis1D.PrimaryIndexTrigger);
#endif
Vector2 primaryAxis = OVRInput.Get(OVRInput.Axis2D.PrimaryThumbstick);
// If speed quantization is enabled, adjust the input to the number of fixed speed steps.
if (FixedSpeedSteps > 0)
{
primaryAxis.y = Mathf.Round(primaryAxis.y * FixedSpeedSteps) / FixedSpeedSteps;
primaryAxis.x = Mathf.Round(primaryAxis.x * FixedSpeedSteps) / FixedSpeedSteps;
}
if (primaryAxis.y > 0.0f)
MoveThrottle += ort * (primaryAxis.y * transform.lossyScale.z * moveInfluence * Vector3.forward);
if (primaryAxis.y < 0.0f)
MoveThrottle += ort * (Mathf.Abs(primaryAxis.y) * transform.lossyScale.z * moveInfluence *
BackAndSideDampen * Vector3.back);
if (primaryAxis.x < 0.0f)
MoveThrottle += ort * (Mathf.Abs(primaryAxis.x) * transform.lossyScale.x * moveInfluence *
BackAndSideDampen * Vector3.left);
if (primaryAxis.x > 0.0f)
MoveThrottle += ort * (primaryAxis.x * transform.lossyScale.x * moveInfluence * BackAndSideDampen *
Vector3.right);
}
if (EnableRotation)
{
Vector3 euler = transform.rotation.eulerAngles;
float rotateInfluence = SimulationRate * Time.deltaTime * RotationAmount * RotationScaleMultiplier;
bool curHatLeft = OVRInput.Get(OVRInput.Button.PrimaryShoulder);
if (curHatLeft && !prevHatLeft)
euler.y -= RotationRatchet;
prevHatLeft = curHatLeft;
bool curHatRight = OVRInput.Get(OVRInput.Button.SecondaryShoulder);
if (curHatRight && !prevHatRight)
euler.y += RotationRatchet;
prevHatRight = curHatRight;
euler.y += buttonRotation;
buttonRotation = 0f;
#if !UNITY_ANDROID || UNITY_EDITOR
if (!SkipMouseRotation)
euler.y += Input.GetAxis("Mouse X") * rotateInfluence * 3.25f;
#endif
if (SnapRotation)
{
if (OVRInput.Get(OVRInput.Button.SecondaryThumbstickLeft))
{
if (ReadyToSnapTurn)
{
euler.y -= RotationRatchet;
ReadyToSnapTurn = false;
}
}
else if (OVRInput.Get(OVRInput.Button.SecondaryThumbstickRight))
{
if (ReadyToSnapTurn)
{
euler.y += RotationRatchet;
ReadyToSnapTurn = false;
}
}
else
{
ReadyToSnapTurn = true;
}
}
else
{
Vector2 secondaryAxis = OVRInput.Get(OVRInput.Axis2D.SecondaryThumbstick);
euler.y += secondaryAxis.x * rotateInfluence;
}
transform.rotation = Quaternion.Euler(euler);
}
}
/// <summary>
/// Invoked by OVRCameraRig's UpdatedAnchors callback. Allows the Hmd rotation to update the facing direction of the player.
/// </summary>
public void UpdateTransform(OVRCameraRig rig)
{
Transform root = CameraRig.trackingSpace;
Transform centerEye = CameraRig.centerEyeAnchor;
if (HmdRotatesY && !Teleported)
{
Vector3 prevPos = root.position;
Quaternion prevRot = root.rotation;
transform.rotation = Quaternion.Euler(0.0f, centerEye.rotation.eulerAngles.y, 0.0f);
root.position = prevPos;
root.rotation = prevRot;
}
UpdateController();
if (TransformUpdated != null)
{
TransformUpdated(root);
}
}
/// <summary>
/// Jump! Must be enabled manually.
/// </summary>
public bool Jump()
{
if (!Controller.isGrounded)
return false;
MoveThrottle += new Vector3(0, transform.lossyScale.y * JumpForce, 0);
return true;
}
/// <summary>
/// Stop this instance.
/// </summary>
public void Stop()
{
Controller.Move(Vector3.zero);
MoveThrottle = Vector3.zero;
FallSpeed = 0.0f;
}
/// <summary>
/// Gets the move scale multiplier.
/// </summary>
/// <param name="moveScaleMultiplier">Move scale multiplier.</param>
public void GetMoveScaleMultiplier(ref float moveScaleMultiplier)
{
moveScaleMultiplier = MoveScaleMultiplier;
}
/// <summary>
/// Sets the move scale multiplier.
/// </summary>
/// <param name="moveScaleMultiplier">Move scale multiplier.</param>
public void SetMoveScaleMultiplier(float moveScaleMultiplier)
{
MoveScaleMultiplier = moveScaleMultiplier;
}
/// <summary>
/// Gets the rotation scale multiplier.
/// </summary>
/// <param name="rotationScaleMultiplier">Rotation scale multiplier.</param>
public void GetRotationScaleMultiplier(ref float rotationScaleMultiplier)
{
rotationScaleMultiplier = RotationScaleMultiplier;
}
/// <summary>
/// Sets the rotation scale multiplier.
/// </summary>
/// <param name="rotationScaleMultiplier">Rotation scale multiplier.</param>
public void SetRotationScaleMultiplier(float rotationScaleMultiplier)
{
RotationScaleMultiplier = rotationScaleMultiplier;
}
/// <summary>
/// Gets the allow mouse rotation.
/// </summary>
/// <param name="skipMouseRotation">Allow mouse rotation.</param>
public void GetSkipMouseRotation(ref bool skipMouseRotation)
{
skipMouseRotation = SkipMouseRotation;
}
/// <summary>
/// Sets the allow mouse rotation.
/// </summary>
/// <param name="skipMouseRotation">If set to <c>true</c> allow mouse rotation.</param>
public void SetSkipMouseRotation(bool skipMouseRotation)
{
SkipMouseRotation = skipMouseRotation;
}
/// <summary>
/// Gets the halt update movement.
/// </summary>
/// <param name="haltUpdateMovement">Halt update movement.</param>
public void GetHaltUpdateMovement(ref bool haltUpdateMovement)
{
haltUpdateMovement = HaltUpdateMovement;
}
/// <summary>
/// Sets the halt update movement.
/// </summary>
/// <param name="haltUpdateMovement">If set to <c>true</c> halt update movement.</param>
public void SetHaltUpdateMovement(bool haltUpdateMovement)
{
HaltUpdateMovement = haltUpdateMovement;
}
/// <summary>
/// Resets the player look rotation when the device orientation is reset.
/// </summary>
public void ResetOrientation()
{
if (HmdResetsY && !HmdRotatesY)
{
Vector3 euler = transform.rotation.eulerAngles;
euler.y = InitialYRotation;
transform.rotation = Quaternion.Euler(euler);
}
}
}
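One observation, not a verified fix: ClickToMove writes transform.position directly on an object whose position OVRPlayerController is already driving through a CharacterController every frame (Controller.Move plus the gravity code above), so the two can fight. Below is a sketch of routing the click-to-move motion through the CharacterController instead, under the assumption that both scripts sit on the same OVRPlayerController GameObject:
// Sketch: inside ClickToMove.MoveToTarget(), replace the direct transform write
// with a CharacterController move so collisions and grounding are still handled
// by the same component OVRPlayerController uses.
CharacterController cc = GetComponent<CharacterController>();
if (Moving && cc != null)
{
    Vector3 flatTarget = new Vector3(targetPos.x, transform.position.y, targetPos.z);
    Vector3 step = Vector3.MoveTowards(transform.position, flatTarget, speed * Time.deltaTime)
                   - transform.position;
    cc.Move(step);   // move along the ground plane; vertical motion stays with the player controller's gravity
}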

UNet player controller issues

I've got a player controller that should work over UNET. I must not be understanding something, though, as any remote players joining a game can't control their character.
The hosting local player can control his/her character just fine.
Basically the way I think this is working is that in Update the local player can press keys. Those keypresses issue Commands to the server where synced bools are set.
In FixedUpdate the server moves the Rigidbody around based on the set bools. On the player object I have a NetworkTransform so any movement the server does should be sent back to the client.
using UnityEngine;
using UnityEngine.Networking;
using System.Collections;
[RequireComponent(typeof(NetworkIdentity))]
public class PlayerController : NetworkBehaviour {
public GameObject NormalBullet;
public Vector3 size = new Vector3(0.25f, 0.25f, 0.25f);
private float speed = 8;
private float angularSpeed = 35;
private float jumpForce = 10;
private Rigidbody _rigidbody;
private Map _map;
private NHNetworkedPool _pool;
private bool _active = false;
private Vector3 _lastPosition;
[SyncVar]
private bool _moveForward;
[SyncVar]
private bool _moveBackward;
[SyncVar]
private bool _turnLeft;
[SyncVar]
private bool _turnRight;
[SyncVar]
private bool _jump;
[SyncVar]
private bool _isgrounded;
[SyncVar]
private bool _isFireing;
void Awake () {
Messenger.AddListener ("MAP_LOADED", OnMapLoaded);
_rigidbody = gameObject.GetComponent<Rigidbody> ();
_map = GameObject.Find ("Map").GetComponent<Map> ();
Physics.IgnoreLayerCollision(LayerMask.NameToLayer("Players"), LayerMask.NameToLayer("Players"), true);
}
override public void OnStartClient () {
_rigidbody.position = new Vector3 (-100, -100, -100);
if (NetworkServer.active) {
_pool = FindObjectOfType<NHNetworkedPool> ();
}
}
/// <summary>
/// Once the board is built, hookup the camera if this is the local player
/// and set the player as active.
/// </summary>
void OnMapLoaded () {
if (isLocalPlayer) {
// Hook up the camera
PlayerCamera cam = Camera.main.GetComponent<PlayerCamera>();
cam.target = transform;
// Move the player to the it's spawn location
CmdSpawn();
}
// Set the player as active
_active = true;
}
/// <summary>
/// Only and active local player should be able to
/// issue commands for the player
/// </summary>
void Update () {
if (!isLocalPlayer || !_active) {
return;
}
if (Input.GetKeyDown ("up")) {
CmdSetMoveForward (true);
}
if (Input.GetKeyUp ("up")) {
CmdSetMoveForward (false);
}
if (Input.GetKeyDown ("down")) {
CmdSetMoveBackward (true);
}
if (Input.GetKeyUp ("down")) {
CmdSetMoveBackward (false);
}
if (Input.GetKeyDown ("left")) {
CmdSetTurnLeft (true);
}
if (Input.GetKeyUp ("left")) {
CmdSetTurnLeft (false);
}
if (Input.GetKeyDown ("right")) {
CmdSetTurnRight (true);
}
if (Input.GetKeyUp ("right")) {
CmdSetTurnRight (false);
}
if (Input.GetKeyDown (KeyCode.Space)) {
CmdSetJump (true);
}
if (Input.GetKeyUp (KeyCode.Space)) {
CmdSetJump (false);
}
if (Input.GetKeyDown (KeyCode.LeftShift)) {
CmdSetShooting(true);
}
if (Input.GetKeyUp (KeyCode.LeftShift)) {
CmdSetShooting(false);
}
}
/// <summary>
/// Only the server should update the player's location
/// the transform is synced to the clients
/// </summary>
void FixedUpdate () {
if (!isServer) {
return;
}
if (_moveForward) {
float moveAmount = speed * Time.deltaTime;
_rigidbody.MovePosition(_rigidbody.position + _rigidbody.transform.forward * moveAmount);
}
if (_moveBackward) {
float moveAmount = (-speed * 0.6f) * Time.deltaTime;
_rigidbody.MovePosition(_rigidbody.position + _rigidbody.transform.forward * moveAmount);
}
if (_turnLeft) {
Quaternion rotateAmount = Quaternion.Euler(new Vector3(0f, -angularSpeed, 0f) * Time.deltaTime);
_rigidbody.MoveRotation(_rigidbody.rotation * rotateAmount);
}
if (_turnRight) {
Quaternion rotateAmount = Quaternion.Euler(new Vector3(0f, angularSpeed, 0f) * Time.deltaTime);
_rigidbody.MoveRotation(_rigidbody.rotation * rotateAmount);
}
if (_jump && _isgrounded) {
_rigidbody.AddForce(Vector3.up * 250);
}
}
void OnCollisionStay (Collision collision) {
if(collision.gameObject.tag.ToUpper() == "GROUND") {
_isgrounded = true;
}
}
void OnCollisionExit (Collision collision) {
if(collision.gameObject.tag.ToUpper() == "GROUND") {
_isgrounded = false;
}
}
/// <summary>
/// Client -> Server
/// Move the player to a spawn location
/// </summary>
[Command]
void CmdSpawn() {
_rigidbody.position = _map.GetPlayerSpawn();
_rigidbody.velocity = Vector3.zero;
}
/// <summary>
/// Client -> Server
/// Set the forward move of the player on/off
/// </summary>
[Command]
void CmdSetMoveForward (bool active) {
_moveForward = active;
}
/// <summary>
/// Client -> Server
/// Set the backward of the player on/off
/// </summary>
[Command]
void CmdSetMoveBackward (bool active) {
_moveBackward = active;
}
/// <summary>
/// Client -> Server
/// Set the left turn of the player on/off
/// </summary>
[Command]
void CmdSetTurnLeft (bool active) {
_turnLeft = active;
}
/// <summary>
/// Client -> Server
/// Set the right turn of the player on/off
/// </summary>
[Command]
void CmdSetTurnRight (bool active) {
_turnRight = active;
}
/// <summary>
/// Client -> Server
/// Set the jumpping of the player on/off
/// </summary>
[Command]
void CmdSetJump (bool active) {
_jump = active;
}
/// <summary>
/// Client -> Server
/// Set shooting weapon on/off
/// </summary>
[Command]
void CmdSetShooting (bool active) {
_isFireing = active;
}
}
You should not do the movement on the server. Rewrite it so that the movement is calculated and executed on the client.
Then add a NetworkTransform component to the player and it should work.
Only the Fire method has to be a Command. But because I don't know what actually happens when _isFireing is set to true, I can't tell you exactly what you should write ;)
EDIT: You also need a NetworkIdentity component on the player if you don't have one.
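For what it's worth, here is a minimal sketch of what client-side movement could look like, assuming the player prefab has a Rigidbody, a NetworkIdentity with Local Player Authority, and a NetworkTransform. The class name is invented, and it replaces the Command-per-keypress pattern rather than extending it:
using UnityEngine;
using UnityEngine.Networking;
// Sketch: client-authoritative movement. The local player moves its own Rigidbody;
// a NetworkTransform on the same object syncs the result to the server and other clients.
public class ClientMovePlayer : NetworkBehaviour
{
    public float speed = 8f;
    public float angularSpeed = 35f;
    private Rigidbody _rigidbody;
    void Awake()
    {
        _rigidbody = GetComponent<Rigidbody>();
    }
    void FixedUpdate()
    {
        if (!isLocalPlayer)
            return;   // only the owning client simulates its own movement
        float move = Input.GetAxis("Vertical") * speed * Time.deltaTime;
        float turn = Input.GetAxis("Horizontal") * angularSpeed * Time.deltaTime;
        _rigidbody.MovePosition(_rigidbody.position + transform.forward * move);
        _rigidbody.MoveRotation(_rigidbody.rotation * Quaternion.Euler(0f, turn, 0f));
    }
    // Shooting still goes through the server so it can spawn networked bullets.
    [Command]
    void CmdFire()
    {
        // Server-side bullet spawn would go here (details depend on the pool setup).
    }
}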

Monogame fixed moving model disappears

So I'm trying to create an interactive environment using a 3D model. I have the model and the camera moving along the Z axis in fixed increments, but after 3 or so seconds the model just disappears. I'm not sure what's happening; any help is much appreciated.
My Game code is posted below.
using Microsoft.Xna.Framework;
using Microsoft.Xna.Framework.Graphics;
using Microsoft.Xna.Framework.Input;
namespace model_viewer
{
/// <summary>
/// This is the main type for your game
/// </summary>
public class Game1 : Game
{
GraphicsDeviceManager graphics;
SpriteBatch spriteBatch;
//Loads the ship
Model Ship;
//Moves the ship and the camera together
float moveCamera;
float moveShip;
//moves the ship in the user's direction
float keyMoveX;
float keyMoveY;
public Game1()
: base()
{
graphics = new GraphicsDeviceManager(this);
Content.RootDirectory = "Content";
}
/// <summary>
/// Allows the game to perform any initialization it needs to before starting to run.
/// This is where it can query for any required services and load any non-graphic
/// related content. Calling base.Initialize will enumerate through any components
/// and initialize them as well.
/// </summary>
protected override void Initialize()
{
moveCamera = -3;
base.Initialize();
}
/// <summary>
/// LoadContent will be called once per game and is the place to load
/// all of your content.
/// </summary>
protected override void LoadContent()
{
// Create a new SpriteBatch, which can be used to draw textures.
spriteBatch = new SpriteBatch(GraphicsDevice);
Ship = Content.Load<Model>("Graphics/Ship");
// TODO: use this.Content to load your game content here
}
/// <summary>
/// UnloadContent will be called once per game and is the place to unload
/// all content.
/// </summary>
protected override void UnloadContent()
{
// TODO: Unload any non ContentManager content here
}
/// <summary>
/// Allows the game to run logic such as updating the world,
/// checking for collisions, gathering input, and playing audio.
/// </summary>
/// <param name="gameTime">Provides a snapshot of timing values.</param>
protected override void Update(GameTime gameTime)
{
if (GamePad.GetState(PlayerIndex.One).Buttons.Back == ButtonState.Pressed || Keyboard.GetState().IsKeyDown(Keys.Escape))
Exit();
moveCamera += 0.005f;
moveShip += 0.005f;
base.Update(gameTime);
}
/// <summary>
/// This is called when the game should draw itself.
/// </summary>
/// <param name="gameTime">Provides a snapshot of timing values.</param>
protected override void Draw(GameTime gameTime)
{
GraphicsDevice.Clear(Color.CornflowerBlue);
Matrix proj = Matrix.CreatePerspectiveFieldOfView(MathHelper.PiOver2, 1, 0.1f, 10000.0f);
Matrix view = Matrix.CreateLookAt(
new Vector3(0, 0, moveCamera),
Vector3.Zero,
Vector3.Up);
float scale = 1.0f / Ship.Meshes[0].BoundingSphere.Radius;
Matrix world = Matrix.CreateScale(scale) * Matrix.CreateRotationY(MathHelper.ToRadians(180)) * Matrix.CreateTranslation(new Vector3(0, 0, moveShip));
Ship.Draw(world, view, proj);
// TODO: Add your drawing code here
base.Draw(gameTime);
}
}
}
I found the answer: my 'view' matrix was targeting a fixed point that wasn't moving with my model, so the camera was rotating to keep that point in view while the model moved away.
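For anyone who lands here with the same symptom, a sketch of the fix described above: let the look-at target advance with the ship, so the camera follows the model instead of spinning to keep the world origin in view.
// Sketch of the corrected view matrix in Draw(): both the camera position and the
// look-at target advance with the ship, so the model stays in front of the camera.
Matrix view = Matrix.CreateLookAt(
    new Vector3(0, 0, moveCamera),    // camera still trails the ship
    new Vector3(0, 0, moveShip),      // target now moves with the ship
    Vector3.Up);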