ARKit estimatedVerticalPlane hit test get plane rotation - arkit

I am using ARKit to detect walls at runtime. When a point on the screen is touched, I run a hit test of type .estimatedVerticalPlane, and I am trying to apply a Y rotation to a node so that it matches the detected plane's orientation.
I want to compute the rotation in:
private func computeYRotationForHitLocation(hitTestResult: ARHitTestResult) -> Float {
    guard hitTestResult.type == .estimatedVerticalPlane else { return 0.0 }
    // guard let planeAnchor = hitTestResult.anchor as? ARPlaneAnchor else { return 0.0 }
    // guard let anchoredNode = sceneView.node(for: planeAnchor) else { return 0.0 }
    let worldTransform = hitTestResult.worldTransform
    let anchorNodeOrientation = ???
    return .pi * anchorNodeOrientation.y
}
How can I deduce the anchorNodeOrientation to apply from the wall orientation? This post (ARKit 1.5 how to get the rotation of a vertical plane) explains it well for hit test types that provide an ARAnchor, but for .estimatedVerticalPlane the anchor is nil.
Also, when I run po hitTestResult.worldTransform in the debugger, it prints a rotation for the worldTransform (91 degrees, etc.), but I cannot retrieve that value from the transform in code.

I finally managed to get the Euler angles from the transform through the following conversion; I still have to check the correctness of the results:
import SceneKit
import ARKit
public extension matrix_float4x4 {

    /// Retrieve the translation from a 4x4 transformation matrix
    public var translation: SCNVector3 {
        get {
            return SCNVector3Make(columns.3.x, columns.3.y, columns.3.z)
        }
    }

    /// Retrieve the Euler angles from a 4x4 transformation matrix
    public var eulerAngles: SCNVector3 {
        get {
            // First we get the quaternion from m00...m22
            // see http://www.euclideanspace.com/maths/geometry/rotations/conversions/matrixToQuaternion/index.htm
            let qw = sqrt(1 + self.columns.0.x + self.columns.1.y + self.columns.2.z) / 2.0
            let qx = (self.columns.2.y - self.columns.1.z) / (qw * 4.0)
            let qy = (self.columns.0.z - self.columns.2.x) / (qw * 4.0)
            let qz = (self.columns.1.x - self.columns.0.y) / (qw * 4.0)

            // Then we deduce the Euler angles with some cosines
            // see https://en.wikipedia.org/wiki/Conversion_between_quaternions_and_Euler_angles

            // roll (x-axis rotation)
            let sinr = +2.0 * (qw * qx + qy * qz)
            let cosr = +1.0 - 2.0 * (qx * qx + qy * qy)
            let roll = atan2(sinr, cosr)

            // pitch (y-axis rotation)
            let sinp = +2.0 * (qw * qy - qz * qx)
            var pitch: Float
            if fabs(sinp) >= 1 {
                pitch = copysign(Float.pi / 2, sinp)
            } else {
                pitch = asin(sinp)
            }

            // yaw (z-axis rotation)
            let siny = +2.0 * (qw * qz + qx * qy)
            let cosy = +1.0 - 2.0 * (qy * qy + qz * qz)
            let yaw = atan2(siny, cosy)

            return SCNVector3(roll, pitch, yaw)
        }
    }
}
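If the detected wall is upright (no roll or pitch), the Y rotation can also be read off the transform without going through the quaternion at all. This is only a sketch of the relation, assuming ARKit's column-major simd layout, where columns.2 is the node's local z axis expressed in world space:
\theta_y = \operatorname{atan2}(m_{02},\, m_{22}) = \operatorname{atan2}(\texttt{columns.2.x},\, \texttt{columns.2.z})
The node can then be rotated by \theta_y about the world Y axis.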

Related

how to find Qibla angle in Flutter Qibla

Here is my current code to get the angle, but it gives a decimal value; I want the angle in degrees, from 1 to 360.
final qiblahDirection = snapshot.data;
var _angle = ((qiblahDirection.qiblah ?? 0) * (pi / 180) * -1);
double calculateQiblaDirection(Coordinates coordinates) {
  // Equation from "Spherical Trigonometry For the use of colleges and schools", page 50
  final longitudeDelta =
      radians(makkah.longitude) - radians(coordinates.longitude);
  final latitudeRadians = radians(coordinates.latitude);
  final term1 = sin(longitudeDelta);
  final term2 = cos(latitudeRadians) * tan(radians(makkah.latitude));
  final term3 = sin(latitudeRadians) * cos(longitudeDelta);
  final angle = atan2(term1, term2 - term3);
  return DoubleUtil.unwindAngle(degrees(angle));
}

class DoubleUtil {
  static double normalizeWithBound(double value, double max) {
    return value - (max * ((value / max).floorToDouble()));
  }

  static double unwindAngle(double value) {
    return normalizeWithBound(value, 360);
  }

  static double closestAngle(double angle) {
    if (angle >= -180 && angle <= 180) {
      return angle;
    }
    return angle - (360 * (angle / 360).roundToDouble());
  }
}
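For reference, the great-circle bearing implemented above can be written as (with \varphi, \lambda the user's latitude/longitude and \varphi_M, \lambda_M those of Makkah, all in radians):
\theta = \operatorname{atan2}\big(\sin(\lambda_M - \lambda),\ \cos\varphi\,\tan\varphi_M - \sin\varphi\,\cos(\lambda_M - \lambda)\big)
degrees(angle) yields a value in (-180, 180], and unwindAngle then maps it into [0, 360) degrees, which covers the 1-to-360 range asked for.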

Is it possible to turn GPS ground speed and vehicle heading into acceleration and yaw angle in NED [duplicate]

I am developing an Android application to calculate position based on sensor data:
Accelerometer --> calculate linear acceleration
Magnetometer + Accelerometer --> direction of movement
The initial position will be taken from GPS (latitude + longitude).
Based on the sensor readings I then need to calculate the new position of the smartphone.
My algorithm is below, but it is not calculating an accurate position; please help me improve it.
Note: my algorithm code is in C# (I am sending the sensor data to a server, where it is stored in a database, and I calculate the position on the server).
All DateTime objects have been calculated from timestamps counted from 01-01-1970.
var prevLocation = ServerHandler.getLatestPosition(IMEI);
var newLocation = new ReceivedDataDTO()
{
LocationDataDto = new LocationDataDTO(),
UsersDto = new UsersDTO(),
DeviceDto = new DeviceDTO(),
SensorDataDto = new SensorDataDTO()
};
//First Reading
if (prevLocation.Latitude == null)
{
//Save GPS Readings
newLocation.LocationDataDto.DeviceId = ServerHandler.GetDeviceIdByIMEI(IMEI);
newLocation.LocationDataDto.Latitude = Latitude;
newLocation.LocationDataDto.Longitude = Longitude;
newLocation.LocationDataDto.Acceleration = float.Parse(currentAcceleration);
newLocation.LocationDataDto.Direction = float.Parse(currentDirection);
newLocation.LocationDataDto.Speed = (float) 0.0;
newLocation.LocationDataDto.ReadingDateTime = date;
newLocation.DeviceDto.IMEI = IMEI;
// saving to database
ServerHandler.SaveReceivedData(newLocation);
return;
}
//If Previous Position not NULL --> Calculate New Position
//Algorithm Starts HERE
var oldLatitude = Double.Parse(prevLocation.Latitude);
var oldLongitude = Double.Parse(prevLocation.Longitude);
var direction = Double.Parse(currentDirection);
Double initialVelocity = prevLocation.Speed;
//Get Current Time to calculate time Travelling - In seconds
var secondsTravelling = date - tripStartTime;
var t = secondsTravelling.TotalSeconds;
//Calculate Distance using physice formula, s= Vi * t + 0.5 * a * t^2
// distanceTravelled = initialVelocity * timeTravelling + 0.5 * currentAcceleration * timeTravelling * timeTravelling;
var distanceTravelled = initialVelocity * t + 0.5 * Double.Parse(currentAcceleration) * t * t;
//Calculate the Final Velocity/ Speed of the device.
// this Final Velocity is the Initial Velocity of the next reading
//Physics Formula: Vf = Vi + a * t
var finalvelocity = initialVelocity + Double.Parse(currentAcceleration) * t;
//Convert from Degree to Radians (For Formula)
oldLatitude = Math.PI * oldLatitude / 180;
oldLongitude = Math.PI * oldLongitude / 180;
direction = Math.PI * direction / 180.0;
//Calculate the New Longitude and Latitude
var newLatitude = Math.Asin(Math.Sin(oldLatitude) * Math.Cos(distanceTravelled / earthRadius) + Math.Cos(oldLatitude) * Math.Sin(distanceTravelled / earthRadius) * Math.Cos(direction));
var newLongitude = oldLongitude + Math.Atan2(Math.Sin(direction) * Math.Sin(distanceTravelled / earthRadius) * Math.Cos(oldLatitude), Math.Cos(distanceTravelled / earthRadius) - Math.Sin(oldLatitude) * Math.Sin(newLatitude));
//Convert From Radian to degree/Decimal
newLatitude = 180 * newLatitude / Math.PI;
newLongitude = 180 * newLongitude / Math.PI;
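In equation form, the destination-point step above is the standard great-circle formula, with \varphi_1, \lambda_1 the old latitude/longitude, d the distance travelled, R the Earth radius and \theta the heading:
\varphi_2 = \arcsin\big(\sin\varphi_1 \cos\tfrac{d}{R} + \cos\varphi_1 \sin\tfrac{d}{R} \cos\theta\big)
\lambda_2 = \lambda_1 + \operatorname{atan2}\big(\sin\theta\,\sin\tfrac{d}{R}\,\cos\varphi_1,\ \cos\tfrac{d}{R} - \sin\varphi_1 \sin\varphi_2\big)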
This is the result I get (the phone was not moving). As you can see, the speed is 27.3263111114502, so there is something wrong in calculating the speed, but I don't know what.
ANSWER:
I found a solution to calculate position based on the sensors; I have posted an answer below. If you need any help, please leave a comment.
These are the results compared to GPS (note: GPS is in red).
As some of you mentioned, you got the equations wrong, but that is only part of the error.
Newton-D'Alembert physics for non-relativistic speeds dictates this:
// init values
double ax=0.0,ay=0.0,az=0.0; // acceleration [m/s^2]
double vx=0.0,vy=0.0,vz=0.0; // velocity [m/s]
double x=0.0, y=0.0, z=0.0; // position [m]
// iteration inside some timer (dt [seconds] period) ...
ax,ay,az = accelerometer values
vx+=ax*dt; // update speed via integration of acceleration
vy+=ay*dt;
vz+=az*dt;
x+=vx*dt; // update position via integration of velocity
y+=vy*dt;
z+=vz*dt;
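In equation form, each timer tick does semi-implicit Euler integration with time step \Delta t:
v_{k+1} = v_k + a_k\,\Delta t, \qquad p_{k+1} = p_k + v_{k+1}\,\Delta t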
the sensor can rotate so the direction must be applied:
// init values
double gx=0.0,gy=-9.81,gz=0.0; // [edit1] background gravity in map coordinate system [m/s^2]
double ax=0.0,ay=0.0,az=0.0; // acceleration [m/s^2]
double vx=0.0,vy=0.0,vz=0.0; // velocity [m/s]
double x=0.0, y=0.0, z=0.0; // position [m]
double dev[9]; // actual device transform matrix ... local coordinate system
(x,y,z) <- GPS position;
// iteration inside some timer (dt [seconds] period) ...
dev <- compass direction
ax,ay,az = accelerometer values (measured in device space)
(ax,ay,az) = dev*(ax,ay,az); // transform acceleration from device space to global map space without any translation to preserve vector magnitude
ax-=gx; // [edit1] remove background gravity (in map coordinate system)
ay-=gy;
az-=gz;
vx+=ax*dt; // update speed (in map coordinate system)
vy+=ay*dt;
vz+=az*dt;
x+=vx*dt; // update position (in map coordinate system)
y+=vy*dt;
z+=vz*dt;
gx, gy, gz is the global gravity vector (~9.81 m/s^2 on Earth);
in my code the global Y axis points up, so gy = -9.81 and the rest are 0.0.
Measurement timing is critical:
the accelerometer must be checked as often as possible (a second is a very long time). I recommend a timer period no bigger than 10 ms to preserve accuracy, and from time to time you should override the calculated position with the GPS value. The compass direction can be checked less often, but it needs proper filtering.
The compass is not correct all the time:
compass values should be filtered for peak values. Sometimes it reads bad values, and it can also be thrown off by electromagnetic pollution or a metal environment. In that case the direction can be checked against GPS during movement and corrections can be made; for example, check GPS every minute, compare the GPS direction with the compass, and if it is constantly off by some angle then add or subtract it.
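As an illustration of that kind of filtering, here is a minimal sketch (the class name and smoothing factor are assumptions, not part of the original answer) that smooths compass headings through their sin/cos components, which damps single bad readings and avoids the 359 -> 0 degree wrap-around jump:
// Minimal sketch: exponential smoothing of a compass heading.
// Filtering sin/cos instead of the raw angle handles the 359 -> 0 degree wrap.
public class HeadingFilter {
    private final double alpha;        // 0 < alpha <= 1, smaller = smoother
    private double sinAvg, cosAvg;
    private boolean initialized = false;

    public HeadingFilter(double alpha) {
        this.alpha = alpha;
    }

    /** Feed a raw heading in degrees, get a filtered heading in [0, 360). */
    public double update(double headingDeg) {
        double rad = Math.toRadians(headingDeg);
        if (!initialized) {
            sinAvg = Math.sin(rad);
            cosAvg = Math.cos(rad);
            initialized = true;
        } else {
            sinAvg = alpha * Math.sin(rad) + (1.0 - alpha) * sinAvg;
            cosAvg = alpha * Math.cos(rad) + (1.0 - alpha) * cosAvg;
        }
        double filtered = Math.toDegrees(Math.atan2(sinAvg, cosAvg));
        return (filtered + 360.0) % 360.0;
    }
}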
Why do simple computations on a server?
I hate the online waste of traffic. Yes, you can log data on a server (though I still think a file on the device would be better), but why limit the positioning functionality to having an internet connection? Not to mention the delays...
[Edit 1] additional notes
I edited the code above a little. The orientation must be as precise as possible to minimize cumulative errors.
Gyros would be better than a compass (or better yet, use both). The acceleration should be filtered; some low-pass filtering should be OK. After gravity removal I would clamp ax, ay, az to usable values and throw away values that are too small. At very low speed I would also do a full stop (unless it is a train, or motion in a vacuum). That should lower the drift but increase other errors, so a compromise has to be found between them.
Add calibration on the fly: when the filtered acceleration is 9.81 or very close to it, the device is probably standing still (unless it is a flying machine), and the orientation/direction can be corrected from the actual gravity direction.
Acceleration sensors and gyros are not suited for position calculation.
After a few seconds the errors become incredibly high (if I remember correctly, the double integration is the problem).
Look at this Google Tech Talk video about sensor fusion;
he explains in great detail why this is not possible.
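The core issue is that a constant accelerometer bias \varepsilon integrates twice into a position error that grows quadratically with time:
\Delta x(t) \approx \tfrac{1}{2}\,\varepsilon\,t^{2}
so even a small bias of 0.1 m/s^2 drifts by roughly 180 m after one minute.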
Having solved the position calculation using sensors, I would like to post my code here in case anyone needs it in the future.
Note: this was only checked on a Samsung Galaxy S2 phone, and only while a person was walking with the phone; it has not been tested while moving in a car or on a bike.
This is the result I got compared with GPS (red line is GPS, blue is the position calculated with the sensors).
The code is not very efficient, but I hope sharing it will help someone and point them in the right direction.
I had two separate classes:
CalculatePosition
CustomSensorService
public class CalculatePosition {
static Double earthRadius = 6378D;
static Double oldLatitude,oldLongitude;
static Boolean IsFirst = true;
static Double sensorLatitude, sensorLongitude;
static Date CollaborationWithGPSTime;
public static float[] results;
public static void calculateNewPosition(Context applicationContext,
Float currentAcceleration, Float currentSpeed,
Float currentDistanceTravelled, Float currentDirection, Float TotalDistance) {
results = new float[3];
if(IsFirst){
CollaborationWithGPSTime = new Date();
Toast.makeText(applicationContext, "First", Toast.LENGTH_LONG).show();
oldLatitude = CustomLocationListener.mLatitude;
oldLongitude = CustomLocationListener.mLongitude;
sensorLatitude = oldLatitude;
sensorLongitude = oldLongitude;
LivePositionActivity.PlotNewPosition(oldLongitude,oldLatitude,currentDistanceTravelled * 1000, currentAcceleration, currentSpeed, currentDirection, "GPSSensor",0.0F,TotalDistance);
IsFirst = false;
return;
}
Date CurrentDateTime = new Date();
if(CurrentDateTime.getTime() - CollaborationWithGPSTime.getTime() > 900000){
//This if statement re-syncs with the GPS position for accuracy --> 900,000 ms == 15 minutes
oldLatitude = CustomLocationListener.mLatitude;
oldLongitude = CustomLocationListener.mLongitude;
LivePositionActivity.PlotNewPosition(oldLongitude,oldLatitude,currentDistanceTravelled * 1000, currentAcceleration, currentSpeed, currentDirection, "GPSSensor", 0.0F, 0.0F);
return;
}
//Convert Variables to Radian for the Formula
oldLatitude = Math.PI * oldLatitude / 180;
oldLongitude = Math.PI * oldLongitude / 180;
currentDirection = (float) (Math.PI * currentDirection / 180.0);
//Formulae to Calculate the NewLAtitude and NewLongtiude
Double newLatitude = Math.asin(Math.sin(oldLatitude) * Math.cos(currentDistanceTravelled / earthRadius) +
Math.cos(oldLatitude) * Math.sin(currentDistanceTravelled / earthRadius) * Math.cos(currentDirection));
Double newLongitude = oldLongitude + Math.atan2(Math.sin(currentDirection) * Math.sin(currentDistanceTravelled / earthRadius)
* Math.cos(oldLatitude), Math.cos(currentDistanceTravelled / earthRadius)
- Math.sin(oldLatitude) * Math.sin(newLatitude));
//Convert Back from radians
newLatitude = 180 * newLatitude / Math.PI;
newLongitude = 180 * newLongitude / Math.PI;
currentDirection = (float) (180 * currentDirection / Math.PI);
//Update old Latitude and Longitude
oldLatitude = newLatitude;
oldLongitude = newLongitude;
sensorLatitude = oldLatitude;
sensorLongitude = oldLongitude;
IsFirst = false;
//Plot Position on Map
LivePositionActivity.PlotNewPosition(newLongitude,newLatitude,currentDistanceTravelled * 1000, currentAcceleration, currentSpeed, currentDirection, "Sensor", results[0],TotalDistance);
}
}
public class CustomSensorService extends Service implements SensorEventListener{
static SensorManager sensorManager;
static Sensor mAccelerometer;
private Sensor mMagnetometer;
private Sensor mLinearAccelertion;
static Context mContext;
private static float[] AccelerometerValue;
private static float[] MagnetometerValue;
public static Float currentAcceleration = 0.0F;
public static Float currentDirection = 0.0F;
public static Float CurrentSpeed = 0.0F;
public static Float CurrentDistanceTravelled = 0.0F;
/*---------------------------------------------*/
float[] prevValues,speed;
float[] currentValues;
float prevTime, currentTime, changeTime,distanceY,distanceX,distanceZ;
float[] currentVelocity;
public static CalculatePosition CalcPosition;
/*-----FILTER VARIABLES-------------------------*/
public static Float prevAcceleration = 0.0F;
public static Float prevSpeed = 0.0F;
public static Float prevDistance = 0.0F;
public static Float totalDistance;
TextView tv;
Boolean First,FirstSensor = true;
@Override
public void onCreate(){
super.onCreate();
mContext = getApplicationContext();
CalcPosition = new CalculatePosition();
First = FirstSensor = true;
currentValues = new float[3];
prevValues = new float[3];
currentVelocity = new float[3];
speed = new float[3];
totalDistance = 0.0F;
Toast.makeText(getApplicationContext(),"Service Created",Toast.LENGTH_SHORT).show();
sensorManager = (SensorManager) getSystemService(SENSOR_SERVICE);
mAccelerometer = sensorManager.getDefaultSensor(Sensor.TYPE_ACCELEROMETER);
mMagnetometer = sensorManager.getDefaultSensor(Sensor.TYPE_MAGNETIC_FIELD);
//mGyro = sensorManager.getDefaultSensor(Sensor.TYPE_GYROSCOPE);
mLinearAccelertion = sensorManager.getDefaultSensor(Sensor.TYPE_LINEAR_ACCELERATION);
sensorManager.registerListener(this, mAccelerometer, SensorManager.SENSOR_DELAY_NORMAL);
sensorManager.registerListener(this, mMagnetometer, SensorManager.SENSOR_DELAY_NORMAL);
//sensorManager.registerListener(this, mGyro, SensorManager.SENSOR_DELAY_NORMAL);
sensorManager.registerListener(this, mLinearAccelertion, SensorManager.SENSOR_DELAY_NORMAL);
}
@Override
public void onDestroy(){
Toast.makeText(this, "Service Destroyed", Toast.LENGTH_SHORT).show();
sensorManager.unregisterListener(this);
//sensorManager = null;
super.onDestroy();
}
@Override
public void onAccuracyChanged(Sensor sensor, int accuracy) {
// TODO Auto-generated method stub
}
@Override
public void onSensorChanged(SensorEvent event) {
float[] values = event.values;
Sensor mSensor = event.sensor;
if(mSensor.getType() == Sensor.TYPE_ACCELEROMETER){
AccelerometerValue = values;
}
if(mSensor.getType() == Sensor.TYPE_LINEAR_ACCELERATION){
if(First){
prevValues = values;
prevTime = event.timestamp / 1000000000.0f; // convert ns to s (matches the division used below)
First = false;
currentVelocity[0] = currentVelocity[1] = currentVelocity[2] = 0;
distanceX = distanceY= distanceZ = 0;
}
else{
currentTime = event.timestamp / 1000000000.0f;
changeTime = currentTime - prevTime;
prevTime = currentTime;
calculateDistance(event.values, changeTime);
currentAcceleration = (float) Math.sqrt(event.values[0] * event.values[0] + event.values[1] * event.values[1] + event.values[2] * event.values[2]);
CurrentSpeed = (float) Math.sqrt(speed[0] * speed[0] + speed[1] * speed[1] + speed[2] * speed[2]);
CurrentDistanceTravelled = (float) Math.sqrt(distanceX * distanceX + distanceY * distanceY + distanceZ * distanceZ);
CurrentDistanceTravelled = CurrentDistanceTravelled / 1000;
if(FirstSensor){
prevAcceleration = currentAcceleration;
prevDistance = CurrentDistanceTravelled;
prevSpeed = CurrentSpeed;
FirstSensor = false;
}
prevValues = values;
}
}
if(mSensor.getType() == Sensor.TYPE_MAGNETIC_FIELD){
MagnetometerValue = values;
}
if(currentAcceleration != prevAcceleration || CurrentSpeed != prevSpeed || prevDistance != CurrentDistanceTravelled){
if(!FirstSensor)
totalDistance = totalDistance + CurrentDistanceTravelled * 1000;
if (AccelerometerValue != null && MagnetometerValue != null && currentAcceleration != null) {
//Direction
float RT[] = new float[9];
float I[] = new float[9];
boolean success = SensorManager.getRotationMatrix(RT, I, AccelerometerValue,
MagnetometerValue);
if (success) {
float orientation[] = new float[3];
SensorManager.getOrientation(RT, orientation);
float azimut = (float) Math.round(Math.toDegrees(orientation[0]));
currentDirection =(azimut+ 360) % 360;
if( CurrentSpeed > 0.2){
CalculatePosition.calculateNewPosition(getApplicationContext(),currentAcceleration,CurrentSpeed,CurrentDistanceTravelled,currentDirection,totalDistance);
}
}
prevAcceleration = currentAcceleration;
prevSpeed = CurrentSpeed;
prevDistance = CurrentDistanceTravelled;
}
}
}
@Override
public IBinder onBind(Intent arg0) {
// TODO Auto-generated method stub
return null;
}
public void calculateDistance (float[] acceleration, float deltaTime) {
float[] distance = new float[acceleration.length];
for (int i = 0; i < acceleration.length; i++) {
speed[i] = acceleration[i] * deltaTime;
distance[i] = speed[i] * deltaTime + acceleration[i] * deltaTime * deltaTime / 2;
}
distanceX = distance[0];
distanceY = distance[1];
distanceZ = distance[2];
}
}
EDIT:
public static void PlotNewPosition(Double newLatitude, Double newLongitude, Float currentDistance,
Float currentAcceleration, Float currentSpeed, Float currentDirection, String dataType) {
LatLng newPosition = new LatLng(newLongitude,newLatitude);
if(dataType == "Sensor"){
tvAcceleration.setText("Speed: " + currentSpeed + " Acceleration: " + currentAcceleration + " Distance: " + currentDistance +" Direction: " + currentDirection + " \n");
map.addMarker(new MarkerOptions()
.position(newPosition)
.title("Position")
.snippet("Sensor Position")
.icon(BitmapDescriptorFactory
.fromResource(R.drawable.line)));
}else if(dataType == "GPSSensor"){
map.addMarker(new MarkerOptions()
.position(newPosition)
.title("PositionCollaborated")
.snippet("GPS Position"));
}
else{
map.addMarker(new MarkerOptions()
.position(newPosition)
.title("Position")
.snippet("New Position")
.icon(BitmapDescriptorFactory
.fromResource(R.drawable.linered)));
}
map.moveCamera(CameraUpdateFactory.newLatLngZoom(newPosition, 18));
}
As per our discussion, since your acceleration is continuously changing, the equations of motion you have applied will not give you an accurate answer.
You would have to keep updating your position and velocity whenever you get a new acceleration reading.
Since that would be highly inefficient, my suggestion is to call the update function every few seconds and use the average acceleration over that period to get the new velocity and position.
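Written out, with \bar{a} the average acceleration measured over the update interval \Delta t:
v_{\text{new}} = v + \bar{a}\,\Delta t, \qquad x_{\text{new}} = x + v\,\Delta t + \tfrac{1}{2}\,\bar{a}\,\Delta t^{2}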
I am not quite sure, but my best guess would be around this part:
Double initialVelocity = prevLocation.Speed;
var t = secondsTravelling.TotalSeconds;
var finalvelocity = initialVelocity + Double.Parse(currentAcceleration) * t;
If, let's say, at prevLocation the speed was 27.326..., and t == 0 and currentAcceleration == 0 (as you said you were idle), then finalvelocity comes down to
var finalvelocity = 27.326 + 0 * 0;
// finalvelocity == 27.326
If that finalvelocity becomes the speed of the current location (so that previouslocation = currentlocation), your finalvelocity would never go down. But then again, there are quite a few assumptions here.
It seems like you are making this hard on yourself. You should be able to simply use the Google Play Services Location API and easily access location, direction, speed, etc. accurately.
I would look into using that instead of doing this work server-side; for example:
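A minimal sketch (the classes and calls are from the standard play-services-location library, but the surrounding wiring is assumed) of reading location, speed, and bearing from the fused provider:
import android.content.Context;
import android.location.Location;
import android.os.Looper;
import com.google.android.gms.location.FusedLocationProviderClient;
import com.google.android.gms.location.LocationCallback;
import com.google.android.gms.location.LocationRequest;
import com.google.android.gms.location.LocationResult;
import com.google.android.gms.location.LocationServices;

// Minimal sketch: assumes the location permission has already been granted.
public class FusedLocationExample {

    public void startUpdates(Context context) {
        FusedLocationProviderClient client =
                LocationServices.getFusedLocationProviderClient(context);

        LocationRequest request = LocationRequest.create()
                .setInterval(2000)                                      // ~2 s between updates
                .setPriority(LocationRequest.PRIORITY_HIGH_ACCURACY);

        client.requestLocationUpdates(request, new LocationCallback() {
            @Override
            public void onLocationResult(LocationResult result) {
                Location loc = result.getLastLocation();
                if (loc == null) return;
                double latitude = loc.getLatitude();
                double longitude = loc.getLongitude();
                float speed = loc.getSpeed();     // m/s, already fused by Play Services
                float bearing = loc.getBearing(); // degrees from north
                // plot / store the position here instead of double-integrating sensors
            }
        }, Looper.getMainLooper());
    }
}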

Apply scale/translation after image target is lost Vuforia Unity

I have a 3D model attached to an image target. What I want is to scale the model according to the camera's position, i.e. when I get closer to the model I want it to get larger, and when I move farther away it should get smaller. I searched the forums and tried two different solutions:
Solution 1:
I created a script and attached it to the 3D model:
public class AR_Capsule : MonoBehaviour {
float minimumDistance = 0;
float maximumDistance = 3;
float minimumDistanceScale = 0.3f;
float maximumDistanceScale = 0.1f;
void Update()
{
float distance = (transform.position - Camera.main.transform.position).magnitude;
float norm = (distance - minimumDistance) / (maximumDistance - minimumDistance);
norm = Mathf.Clamp01(norm);
Vector3 minScale = Vector3.one * maximumDistanceScale;
Vector3 maxScale = Vector3.one * minimumDistanceScale;
transform.localScale = Vector3.Lerp(maxScale, minScale, norm);
}
}
Solution 2:
public class AR_Capsule : MonoBehaviour {
public float speed = 10f;
void Update()
{
transform.Translate(0, 0,(-1 * Input.acceleration.z * speed * Time.deltaTime));
}
}
Both solutions work when the image target is visible. But when I lose sight of the target, the object stays still and the camera position is always (0.0, 0.0, 0.0).
Do you have any idea why the object freezes when the target is lost?
I am developing for the Epson Moverio BT-300 and working with the Vuforia eyewear sample app.
Thanks.
I found the issue. The problem is about getting the correct camera position: since I am using eyewear, Camera.main.transform.position always returns (0,0,0). Instead I used Camera.main.transform.GetChild(1).transform.position, which works fine. Below is the working code:
// distance between the model and the eyewear camera when tracking started
initialDist = Vector3.Distance(transform.position, Camera.main.transform.GetChild(1).transform.position);
// current distance, measured from the last known marker position
distance = Vector3.Distance(lastMarkerpos, Camera.main.transform.GetChild(1).transform.position);
// rescale the model proportionally, clamped to a reasonable size range
if((transform.localScale.x * (initialDist / distance)) > 0.05 && (transform.localScale.x * (initialDist / distance)) < 2)
{
transform.localScale = new Vector3(transform.localScale.x * (initialDist / distance), transform.localScale.y * (initialDist / distance), transform.localScale.z * (initialDist / distance));
}

Restrict Direction of rotation in Realtime

I have a clock hand that the player starts rotating (dragging) CCW until they complete one full rotation (without lifting the drag).
I am trying to lock the rotation to the CCW direction only while/once the player starts rotating. I got help from the following link: [Detect Direction][1] by @BobBobson108.
Here is a GIF of what is actually happening: Demo
void OnMouseDrag()
{
//rotation
Vector3 mousePos = Input.mousePosition;
mousePos.z = 5.23f;
Vector3 objectPos = Camera.main.WorldToScreenPoint(transform.position);
mousePos.x = mousePos.x - objectPos.x;
mousePos.y = mousePos.y - objectPos.y;
angle = Mathf.Atan2(mousePos.y, mousePos.x) * Mathf.Rad2Deg;
transform.rotation = Quaternion.Euler(new Vector3(0, 0, angle - 90f));
hand_vector = transform.up;
cross_product = Vector3.Cross(ref_vector, hand_vector);
dot_product = Vector3.Dot(cross_product, transform.forward*-1);
//Debug.Log("Hand Vector: " + hand_vector);
//Debug.Log("Ref Vector: " + ref_vector);
Debug.Log(cross_product);
Debug.Log(dot_product);
}
I tried to debug the values of the cross product, but the direction of the resultant vector seems to be the same even when the player starts rotating backwards.
Also, the cross product vector changes direction only when the player starts rotating in the CW direction from the default position, i.e. 12 o'clock.
I have very little experience working with quaternions and rotations. Any help would be much appreciated. Thanks!
Desired
Use the Vector3.Angle function (http://docs.unity3d.com/ScriptReference/Vector3.Angle.html) to get the magnitude of the angle; since it is unsigned, combine it with the sign of your cross/dot product (or use Vector3.SignedAngle in newer Unity versions) to get a positive/negative value depending on the direction.
You can then lock the rotation when the sign is not in the direction you want.
I managed to figure out a solution to the above question. I am posting it here in case anyone stumbles across a similar situation in the future.
void OnMouseDrag()
{
transform.Rotate(new Vector3(0,0, Mathf.Sqrt(Input.GetAxis("Mouse X") * Input.GetAxis("Mouse X") + Input.GetAxis("Mouse Y") * Input.GetAxis("Mouse Y"))));
/*
Vector3 mousePos = Input.mousePosition;
mousePos.z = 5.23f;
Vector3 objectPos = Camera.main.WorldToScreenPoint(transform.position);
mousePos.x = mousePos.x - objectPos.x;
mousePos.y = mousePos.y - objectPos.y;
print("Angle is :");
print(Mathf.Atan2(mousePos.y, mousePos.x) * Mathf.Rad2Deg - 90f);
if (counter_clockwise)
{
if (Mathf.Atan2(mousePos.y, mousePos.x) * Mathf.Rad2Deg - 90f > 0)
{
angle = Mathf.Max(angle, Mathf.Atan2(mousePos.y, mousePos.x) * Mathf.Rad2Deg - 90);
transform.rotation = Quaternion.Euler(new Vector3(0, 0, angle));
}
if (Mathf.Atan2(mousePos.y, mousePos.x) * Mathf.Rad2Deg - 90f < 0)
{
angle = Mathf.Max(angle, 360 + (Mathf.Atan2(mousePos.y, mousePos.x) * Mathf.Rad2Deg - 90));
transform.rotation = Quaternion.Euler(new Vector3(0, 0, angle));
}
}
*/
glow_color.a = 1f;
child_yellowglow.color = glow_color;
}
I am setting the direction bools using this:
void OnMouseDown()
{
Vector3 mouseDragStartPos = Input.mousePosition;
print("Mouse pos: " + mouseDragStartPos);
print(" ref_vector" + ref_vector);
if (mouseDragStartPos.x < ref_vector.x)
{
clockwise = false;
counter_clockwise = true;
print("Left !!!");
}
if (mouseDragStartPos.x >= ref_vector.x)
{
clockwise = true;
counter_clockwise = false;
print("Right !!!");
}
}
Hope this helps !!!

Is there a way to point the camera in the direction of a specific vector in Javafx8?

I've looked around for a while and I can't seem to find a way to point a camera in a direction, say [1, 2, 3], without calculating the angles between everything. I'm new to 3D graphics in general, so I don't know if I'm missing something simple.
I have edited my answer to provide an example of how to manipulate the Affine transform
relative to the current position in the scene, which is a very useful transform for 3D space.
I will cover the basics with code, but I do suggest you read up on transformation matrices.
This is a good tutorial that covers everything in layman's terms.
So first, define a few Callbacks for getting both rows and columns from the affine matrix:
public Affine affine = new Affine();
//Points: fwd, right, up Point3D: pos
//Forward / look direction
Callback<Transform, Point3D> forwardColCallback = (a) -> {
return new Point3D(a.getMzx(), a.getMzy(), a.getMzz());
};
Callback<Transform, Point3D> forwardRowCallback = (a) -> {
return new Point3D(a.getMxz(), a.getMyz(), a.getMzz());
};
// up direction
Callback<Transform, Point3D> upColCallback = (a) -> {
return new Point3D(a.getMyx(), a.getMyy(), a.getMyz());
};
Callback<Transform, Point3D> upRowCallback = (a) -> {
return new Point3D(a.getMxy(), a.getMyy(), a.getMzy());
};
// right direction
Callback<Transform, Point3D> rightColCallback = (a) -> {
return new Point3D(a.getMxx(), a.getMxy(), a.getMxz());
};
Callback<Transform, Point3D> rightRowCallback = (a) -> {
return new Point3D(a.getMxx(), a.getMyx(), a.getMzx());
};
//position
Callback<Transform, Point3D> positionCallback = (a) ->{
return new Point3D(a.getTx(), a.getTy(), a.getTz());
};
Point3D getForwardCol(){
return forwardColCallback.call(getTransformableNode().getLocalToSceneTransform());
}
Point3D getForwardRow(){
return forwardRowCallback.call(getTransformableNode().getLocalToSceneTransform());
}
Point3D getRightDirection(){
return rightColCallback.call(getTransformableNode().getLocalToSceneTransform());
}
Point3D getRightRow(){
return rightRowCallback.call(getTransformableNode().getLocalToSceneTransform());
}
Point3D getUpCol(){
return upColCallback.call(getTransformableNode().getLocalToSceneTransform());
}
Point3D getUpRow(){
return upRowCallback.call(getTransformableNode().getLocalToSceneTransform());
}
Point3D getPosition(){
return positionCallback.call(getTransformableNode().getLocalToSceneTransform());
}
OK, now let's build some methods that use those.
Translations:
private void moveForward() {
affine.setTx(getPosition().getX() + speed * getForwardRow().getX());
affine.setTy(getPosition().getY() + speed * getForwardRow().getY());
affine.setTz(getPosition().getZ() + speed * getForwardRow().getZ());
}
private void strafeLeft() {
affine.setTx(getPosition().getX() + speed * -getRightRow().getX());
affine.setTy(getPosition().getY() + speed * -getRightRow().getY());
affine.setTz(getPosition().getZ() + speed * -getRightRow().getZ());
}
private void strafeRight() {
affine.setTx(getPosition().getX() + speed * getRightRow().getX());
affine.setTy(getPosition().getY() + speed * getRightRow().getY());
affine.setTz(getPosition().getZ() + speed * getRightRow().getZ());
}
private void moveBack() {
affine.setTx(getPosition().getX() + speed * -getForwardRow().getX());
affine.setTy(getPosition().getY() + speed * -getForwardRow().getY());
affine.setTz(getPosition().getZ() + speed * -getForwardRow().getZ());
}
private void moveUp() {
affine.setTx(getPosition().getX() + speed * -getUpRow().getX());
affine.setTy(getPosition().getY() + speed * -getUpRow().getY());
affine.setTz(getPosition().getZ() + speed * -getUpRow().getZ());
}
private void moveDown() {
affine.setTx(getPosition().getX() + speed * getUpRow().getX());
affine.setTy(getPosition().getY() + speed * getUpRow().getY());
affine.setTz(getPosition().getZ() + speed * getUpRow().getZ());
}
For Rotations you could do something like this by adding a Translate and the 3 respective Rotate transforms:
// mouse coordinates not implemented in this example
// You would need to define them
scene.setOnMousePressed(me->{
t.setX(getPosition().getX());
t.setY(getPosition().getY());
t.setZ(getPosition().getZ());
affine.setToIdentity();
rotateY.setAngle(
MathUtils.clamp(((rotateY.getAngle() + moveDelta.getX() * (speed * 0.05)) % 360 + 540) % 360 - 180, -360, 360)
); // horizontal
rotateX.setAngle(
MathUtils.clamp(((rotateX.getAngle() - moveDelta.getY() * (speed * 0.05)) % 360 + 540) % 360 - 180, -90, 90)
); // vertical
affine.prepend(t.createConcatenation(rotateY.createConcatenation(rotateX)));
}
);
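To answer the original question directly (pointing the camera along a specific vector such as [1, 2, 3] without computing individual angles), the same row/column idea can be used to build a look-at transform. This is only a sketch with assumed names (lookAt is not a JavaFX API), and the up-vector choice may need adjusting since JavaFX's Y axis points down:
import javafx.geometry.Point3D;
import javafx.scene.transform.Affine;

public final class LookAtUtil {

    // Builds an Affine whose columns are the right/up/forward axes, so the node's
    // local +Z axis (the camera's view direction) points from `position` toward `target`.
    // Assumes `target` is not directly along the up hint.
    public static Affine lookAt(Point3D position, Point3D target, Point3D upHint) {
        Point3D forward = target.subtract(position).normalize();
        Point3D right = upHint.crossProduct(forward).normalize();
        Point3D up = forward.crossProduct(right).normalize();

        Affine a = new Affine();
        a.setToTransform(
                right.getX(), up.getX(), forward.getX(), position.getX(),
                right.getY(), up.getY(), forward.getY(), position.getY(),
                right.getZ(), up.getZ(), forward.getZ(), position.getZ());
        return a;
    }
}
Usage would then be along the lines of camera.getTransforms().setAll(LookAtUtil.lookAt(camPos, camPos.add(new Point3D(1, 2, 3)), new Point3D(0, 1, 0))); where (0, 1, 0) keeps the camera's default JavaFX roll (Y pointing down the screen).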