For this setup in Unity, there is a circular dial whose rotation value is used to scale a model. The actual scaling code is removed here because there is a bug in the way the zAngle value is calculated. Ideally it should change in steady increments in every direction, but instead the values jump when the drag crosses the horizontal and vertical axes and grow only slowly when the dial is dragged in other directions.
Is there a recommended practice for calculating a rotation value from the screen drag position in cases like this?
zAngle is the final angle used to rotate the dial; it would also be used, after some clamping, to scale the model, but that is not the requirement here.
public class CircularScaleDial : MonoBehaviour, IBeginDragHandler, IDragHandler, IEndDragHandler
{
    RectTransform rectTransform;
    public GameObject model;
    [SerializeField] Vector2 delta = Vector2.one;
    [SerializeField] CanvasScaler canvasScaler;
    private Vector3 startPosition;
    [SerializeField] int zAngle;
    Vector3 gd1 = Vector3.zero;

    private void Awake()
    {
        rectTransform = GetComponent<RectTransform>();
    }

    public void OnBeginDrag(PointerEventData eventData)
    {
        startPosition = GetDelta_3(eventData);
    }

    int GetAngleFromPosition(Vector2 pos)
    {
        var angleRadians = Mathf.Atan2(pos.y, pos.x);
        var angleDegrees = angleRadians * Mathf.Rad2Deg;
        return (int)angleDegrees;
    }

    Vector3 GetDelta_3(PointerEventData data)
    {
        Vector3 pos = new Vector3(data.position.x, data.position.y, 0);
        Vector3 s2vp = Camera.main.ScreenToViewportPoint(pos);
        return s2vp;
    }

    public void OnDrag(PointerEventData data)
    {
        if (data.dragging)
        {
            gd1 = GetDelta_3(data);
            delta.x = gd1.x;
            delta.y = gd1.y;
            zAngle = GetAngleFromPosition(delta);
            rectTransform.localRotation = Quaternion.Euler(new Vector3(0, 0, zAngle));
        }
    }
}
It was more or less fixed with simpler code. I made other tweaks, but this was the main change. The essence is to resolve the drag delta along the y axis. In this case I just take the y component, but ideally you would take the dot product of two vectors to resolve one along the other (a sketch of that variant follows the code below).
public void OnBeginDrag(PointerEventData eventData)
{
    startV2 = eventData.position;
}

public void OnDrag(PointerEventData data)
{
    if (data.dragging)
    {
        endV2 = data.position;
        Vector2 deltaV2 = endV2 - startV2;
        deltaV2 = deltaV2.normalized;
        rectTransform.Rotate(new Vector3(0, 0, -deltaV2.y), Space.Self);
        startV2 = endV2;
    }
}
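For the dot-product variant mentioned above, a minimal sketch might look like the following. The reference axis screenUp is an assumption introduced here for illustration; startV2 and endV2 are the same fields used in the snippet above.

// Hypothetical sketch: resolve the drag delta along a chosen screen-space axis
// with a dot product instead of reading the y component directly.
public void OnDrag(PointerEventData data)
{
    if (data.dragging)
    {
        endV2 = data.position;
        Vector2 deltaV2 = (endV2 - startV2).normalized;

        // Axis to resolve against; Vector2.up reproduces "just take the y component".
        Vector2 screenUp = Vector2.up;
        float amount = Vector2.Dot(deltaV2, screenUp);

        rectTransform.Rotate(new Vector3(0, 0, -amount), Space.Self);
        startV2 = endV2;
    }
}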
Create a new project, add a sphere with a Rigidbody, and attach this small script. Then add 2 points to the points list. The script does nothing but move the object to each point of the list.
using System;
using UnityEngine;

public class MoveAlongPath : MonoBehaviour
{
    public Vector3[] points;
    public float speed = 1.0f;
    public float minDistance = Vector3.kEpsilon;

    [SerializeField] private Vector3 _posCurrent;
    [SerializeField] private Vector3 _posNext;
    [SerializeField] private Vector3 _newPos;
    [SerializeField] private int _currentIndex;
    [SerializeField] private Vector3 _rotate;
    [SerializeField] private Vector3 _oldPosition;
    [SerializeField] private Rigidbody _rigidbody;

    private void NextPoint()
    {
        _posCurrent = points[_currentIndex];
        transform.position = _posCurrent;
        _currentIndex++;
        if (_currentIndex == points.Length) {
            _currentIndex = 0;
        }
        _posNext = points[_currentIndex];
    }

    private void Start()
    {
        _rigidbody = GetComponent<Rigidbody>();
        NextPoint();
    }

    private void FixedUpdate()
    {
        Transform t = transform;
        _oldPosition = t.position;
        float singleStep = speed * Time.fixedDeltaTime;
        _newPos = new Vector3(
            _oldPosition.x + singleStep,
            _oldPosition.y + singleStep,
            _oldPosition.z + singleStep
        );
        _rigidbody.MovePosition(_newPos);
        if (Vector3.Distance(_newPos, _posNext) <= minDistance) {
            /* close enough -> next point: */
            NextPoint();
        }
    }
}
Well... it doesn't work. I've tried many things:
- using Update() instead of FixedUpdate() - but AFAIK Update() should only handle input and FixedUpdate() should do the actual physics work
- using transform.position = _newPos; - that doesn't work, and MovePosition() doesn't work either...
I'm stuck!
You're not moving in a meaningful direction; you're just moving +1 unit per second along each axis.
Try
var direction = _posNext - _posCurrent;
This is the direction, but if you moved by that whole vector you'd be there in one tick. What you want is the normalized unit vector, which has a magnitude of 1; then you can multiply your speed by that.
I'm writing this on a cell phone so I may not have the methods exact:
var singleStep = speed * Time.fixedDeltaTime * direction.normalized;
And now you want to take the SMALLER of your direction or singleStep. It's extremely likely that your singleStep would overshoot the target, and you'll know you're going to overshoot because the singleStep is a bigger step than just going to the target.
if (singleStep.magnitude > direction.magnitude)
{
    singleStep = direction; // don't overshoot
}
Then you can move and that should get it :)
_newPos = _oldPosition + singleStep;
_rigidbody.MovePosition(_newPos);
Edited by OP: here's the full code of the working solution, thanks again!
using UnityEngine;

public class MoveAlongPath : MonoBehaviour
{
    public Vector3[] points;
    public float speed = 1.0f;
    public float minDistance = Vector3.kEpsilon;

    private SphereCollider _c;

    [SerializeField] private Vector3 _posStart;
    [SerializeField] private Vector3 _posEnd;
    [SerializeField] private int _currentIndex;
    [SerializeField] private Rigidbody _rigidbody;
    [SerializeField] private Vector3 _direction;

    private void NextPoint()
    {
        _posStart = points[_currentIndex];
        transform.position = _posStart;
        _currentIndex = (_currentIndex + 1) % points.Length;
        _posEnd = points[_currentIndex];
        _direction = _posEnd - _posStart;
        _direction = _direction.normalized;
        Debug.Log("Going from " + _posStart + " -> to -> " + _posEnd);
    }

    private void Start()
    {
        _rigidbody = GetComponent<Rigidbody>();
        NextPoint();
    }

    private void FixedUpdate()
    {
        Vector3 step = speed * Time.fixedDeltaTime * _direction.normalized;
        if (step.magnitude > _direction.magnitude) {
            step = _direction; // don't overshoot
        }
        Vector3 newPos = transform.position + step;
        _rigidbody.MovePosition(newPos);
        if (Vector3.Distance(_posEnd, transform.position) < minDistance) {
            /* close enough -> next point: */
            NextPoint();
        }
    }
}
What do I change here so that my character moves forward and can also jump?
public class PlayerJump : MonoBehaviour {

    public float jumpForce = 10f;
    private Rigidbody2D myRB;
    public float speed = 2f;

    void Start () {
        myRB = transform.GetComponent<Rigidbody2D> ();
    }

    public void FixedUpdate () {
        myRB.velocity = Vector2.right * speed;
    }

    public void Jump () {
        myRB.velocity = new Vector2 (myRB.velocity.x, jumpForce);
    }
}
Consider using rb.AddForce(transform.up * jumpHeight, ForceMode2D.Impulse); for the jump. What's happening is that you are resetting your vertical velocity while you move, so you also need to take your current velocity on the y axis into account, using myRB.velocity = new Vector2(speed, myRB.velocity.y);
Vector2.right is Vector2(1, 0), so your vertical velocity gets set to 0 every FixedUpdate. For moving, you need something similar to what you already have for the jump (keep your vertical velocity):
myRB.velocity = new Vector2(speed, myRB.velocity.y);
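Putting both suggestions together, a minimal sketch of the adjusted script might look like this (assuming Jump() is still wired to an input handler elsewhere, which the question does not show):

// Hedged sketch: each velocity assignment preserves the component it is not driving.
using UnityEngine;

public class PlayerJump : MonoBehaviour {

    public float jumpForce = 10f;
    public float speed = 2f;

    private Rigidbody2D myRB;

    void Start () {
        myRB = GetComponent<Rigidbody2D> ();
    }

    void FixedUpdate () {
        // Keep the current vertical velocity; only drive the horizontal component.
        myRB.velocity = new Vector2 (speed, myRB.velocity.y);
    }

    public void Jump () {
        // Keep the current horizontal velocity; only set the vertical component.
        myRB.velocity = new Vector2 (myRB.velocity.x, jumpForce);
    }
}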
This has probably been asked a dozen times, but I can't seem to find the answer anywhere.
I have a dedicated server for a space game, written as a C# console application. The problem I'm facing is the synchronisation of GameObject rotations. I suspect the issue is related to gimbal lock, but I am not sure.
Here is my player movement/rotation controller:
public class PlayerMovement : MonoBehaviour {

    [SerializeField] float maxSpeed = 40f;
    [SerializeField] float shipRotationSpeed = 60f;

    Transform characterTransform;

    void Awake()
    {
        characterTransform = transform;
    }

    void Update()
    {
        Turn();
        Thrust();
    }

    float Speed() {
        return maxSpeed;
    }

    void Turn() {
        float rotX = shipRotationSpeed * Time.deltaTime * CrossPlatformInputManager.GetAxis("Vertical");
        float rotY = shipRotationSpeed * Time.deltaTime * CrossPlatformInputManager.GetAxis("Horizontal");
        characterTransform.Rotate(-rotX, rotY, 0);
    }

    void Thrust() {
        if (CrossPlatformInputManager.GetAxis("Move") > 0) {
            characterTransform.position += characterTransform.forward * Speed() * Time.deltaTime * CrossPlatformInputManager.GetAxis("Move");
        }
    }
}
This script is applied to my character object, which is a ship. Note that the character object has a child object, which is the ship model itself; the child's rotation and position are fixed and do not change. When the character has moved or rotated I send the following to the server: position (x, y, z) and rotation (x, y, z, w).
Now here is the script that receives the network packet information and updates the other players in the game:
public class CharacterObject : MonoBehaviour {

    [SerializeField] GameObject shipModel;

    public int guid;
    public int characterId;
    public string name;
    public int shipId;

    Vector3 realPosition;
    Quaternion realRotation;

    public void Awake() {
    }

    public int Guid { get { return guid; } }
    public int CharacterId { get { return characterId; } }

    void Start () {
        realPosition = transform.position;
        realRotation = transform.rotation;
    }

    void Update () {
        // Do nothing
    }

    internal void LoadCharacter(SNCharacterUpdatePacket cuPacket) {
        guid = cuPacket.CharacterGuid;
        characterId = cuPacket.CharacterId;
        name = cuPacket.CharacterName;
        shipId = cuPacket.ShipId;
        realPosition = new Vector3(cuPacket.ShipPosX, cuPacket.ShipPosY, cuPacket.ShipPosZ);
        realRotation = new Quaternion(cuPacket.ShipRotX, cuPacket.ShipRotY, cuPacket.ShipRotZ, cuPacket.ShipRotW);
        UpdateTransform();
        Instantiate(Resources.Load("Ships/Ship1/Ship1"), shipModel.transform);
    }

    internal void UpdateCharacter(SNCharacterUpdatePacket cuPacket) {
        realPosition = new Vector3(cuPacket.ShipPosX, cuPacket.ShipPosY, cuPacket.ShipPosZ);
        realRotation = new Quaternion(cuPacket.ShipRotX, cuPacket.ShipRotY, cuPacket.ShipRotZ, cuPacket.ShipRotW);
        UpdateTransform();
    }

    void UpdateTransform() {
        transform.position = Vector3.Lerp(transform.position, realPosition, 0.1f);
        transform.rotation = Quaternion.Lerp(transform.rotation, realRotation, 0.5f);
    }
}
Do you see anything wrong with the code?
My experience with two players in the game is the following:
When I start the game with two players they spawn at the same location (0, 0, 0) and with the same rotation (0, 0, 0).
Let's say the other player is rotating continuously around the X axis only. What I am experiencing is:
- the first 90 degrees of rotation render fine,
- for the next 180 degrees the object stays in place,
- the last 90 degrees of rotation render fine.
In the first version I did not send the 'w' value of the Quaternion; I added it in the second version and it did not help. Note that there are no restrictions on rotation and users can rotate endlessly in all directions.
By the way, the positioning works fine.
I might not understand Quaternions fully, but I thought their purpose was to avoid the gimbal lock issue.
Any help would be appreciated.
To answer my own question: I used transform.localEulerAngles instead of transform.rotation, and everything seems to work just fine.
The only thing I am unsure about now is whether gimbal lock could still happen with this change.
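For reference, a minimal sketch of what that change could look like in the UpdateTransform method above, assuming the received quaternion is converted to Euler angles before being applied, is:

// Hypothetical sketch: apply the received rotation as local Euler angles
// (converted from the received quaternion) instead of assigning transform.rotation.
void UpdateTransform() {
    transform.position = Vector3.Lerp(transform.position, realPosition, 0.1f);
    transform.localEulerAngles = realRotation.eulerAngles;
}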
I'm working on a maze game for Android in Unity 5.1.1f1 and I'm having trouble controlling my ball with the accelerometer. At first I tried:
public class PlayerController : MonoBehaviour {

    public GameObject sphere;
    public Camera camera;
    public float speed = 200;

    private Rigidbody myRigidBody;

    void Start()
    {
        myRigidBody = gameObject.GetComponent<Rigidbody> ();
    }

    void FixedUpdate()
    {
        float moveH = Input.acceleration.x;
        float moveV = -Input.acceleration.z;
        Vector3 move = new Vector3 (moveH, 0.0f, moveV);
        myRigidBody.AddForce (move * speed * Time.deltaTime);
    }
}
But the ball does not move as it should. Then I tried another solution, but the ball still doesn't move right: it is sometimes hard to move left/right/forward/backward, and sometimes the ball even rotates in the opposite direction.
public class PlayerController : MonoBehaviour {

    public float speedAc = 10;

    // accelerometer
    private Vector3 zeroAc;
    private Vector3 curAc;
    private float sensH = 10;
    private float sensV = 10;
    private float smooth = 0.5f;
    private float GetAxisH = 0;
    private float GetAxisV = 0;

    // Use this for initialization
    void Start () {
        ResetAxes();
    }

    // accelerometer
    void ResetAxes() {
        zeroAc = Input.acceleration;
        curAc = Vector3.zero;
    }

    void FixedUpdate () {
        curAc = Vector3.Lerp(curAc, Input.acceleration - zeroAc, Time.deltaTime / smooth);
        GetAxisH = Mathf.Clamp(curAc.x * sensH, -1, 1);
        GetAxisV = Mathf.Clamp(-curAc.z * sensV, -1, 1);
        Vector3 movement = new Vector3 (GetAxisH, 0.0f, GetAxisV);
        GetComponent<Rigidbody>().AddForce(movement * speedAc);
    }
}
Can someone help me, please?
I have answered this one in another SO question: Unity 3D realistic accelerometer control - there are two variants of control there; just copy the one that fits best.
If you're having more problems, ask.
I am attempting to follow a few Unity3D examples on C# scripting for tower-defense style games. I need a turret to 'aim' at another GameObject. The examples I find do not seem to account for an origin that is not at (0, 0, 0); that is, when the turret is in a different location, it aims relative to the world origin rather than its current location.
How it behaves now:
http://screencast.com/t/Vx35LJXRKNUm
In the script I am using, how do I give Quaternion.LookRotation information about the current location of the turret so that it is included in its calculation? See the CalculateAimPosition function in the script below:
using UnityEngine;
using System.Collections;

public class TurretBehavior : MonoBehaviour {

    public GameObject projectile;
    public GameObject muzzleEffect;
    public float reloadTime = 1f;
    public float turnSpeed = 5f;
    public float firePauseTime = .25f;
    public Transform target;
    public Transform[] muzzlePositions;
    public Transform turretBall;

    private float nextFireTime;
    private float nextMoveTime;
    private Quaternion desiredRotation;
    private Vector3 aimPoint;

    // Update is called once per frame
    void Update ()
    {
        if (target)
        {
            if (Time.time >= nextMoveTime)
            {
                CalculateAimPosition(target.position);
                transform.rotation = Quaternion.Lerp(turretBall.rotation, desiredRotation, Time.deltaTime * turnSpeed);
            }
            if (Time.time >= nextFireTime) {
                FireProjectile();
            }
        }
    }

    void OnTriggerEnter(Collider other)
    {
        if (other.gameObject.tag == "TurretEnemy")
        {
            nextFireTime = Time.time + (reloadTime * .5f);
            target = other.gameObject.transform;
        }
    }

    void OnTriggerExit(Collider other)
    {
        if (other.gameObject.transform == target) {
            target = null;
        }
    }

    void CalculateAimPosition(Vector3 targetPosition)
    {
        aimPoint = new Vector3 (targetPosition.x, targetPosition.y, targetPosition.z);
        desiredRotation = Quaternion.LookRotation (aimPoint);
    }

    void FireProjectile()
    {
        nextFireTime = Time.time + reloadTime;
        nextMoveTime = Time.time + firePauseTime;
        foreach (Transform transform in muzzlePositions)
        {
            Instantiate(projectile, transform.position, transform.rotation);
        }
    }
}
The error is in the usage of Quaternion.LookRotation.
The function takes a forward direction in world space (and an optional up vector, defaulting to Vector3.up) and returns a Quaternion representing the orientation of such a reference frame.
You are instead supplying a world-space position (targetPosition) as input, which makes no sense. Incidentally, a normalized position vector expressed in world space is the direction from the origin to that point, so the code happens to work correctly when the tower is placed at the origin.
What you need to pass to LookRotation is the world-space direction from the tower to the target:
Vector3 aimDir = (targetPosition - transform.position).normalized;
desiredRotation = Quaternion.LookRotation(aimDir);
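Applied to the script above, the corrected CalculateAimPosition would then look something like this:

void CalculateAimPosition(Vector3 targetPosition)
{
    // Aim along the direction from the turret to the target,
    // not at the target's world position itself.
    Vector3 aimDir = (targetPosition - transform.position).normalized;
    desiredRotation = Quaternion.LookRotation(aimDir);
}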