Want to get screen point on render texture camera in Unity3D

I am making a sniper game. I have two cameras: one renders the whole environment, and the second one sits in the scope of the gun and has a high zoom level.
I am able to show an enemy marker on screen through the main camera, and it appears in the proper place there, but in the scope camera (the 2nd camera) it is not positioned correctly. I want these markers to line up exactly with the enemy in both views.
Here is my script for showing the marker so far. I am attaching screenshots: the first one is from the scope camera and shows the marker in the wrong place, and the second one is from the main camera, showing it in the proper place (the red circle is pointing at it).
using UnityEngine;
using UnityEngine.UI;

public class identity_shower : MonoBehaviour {

    public Texture tex;
    public GameObject helt;
    public bool target_is_high = false;

    private int counter_for_high = -1;
    private int counter_for_low = -1;
    private int counter_value = 50;
    private bool bool_for_high = false;
    private bool bool_for_low = false;
    private Camera scope_cam_active;
    private RectTransform rectTransform;
    private Vector2 uiOFFset;

    // Draws the marker once bool_for_high has been set by show_your_identity()
    void OnGUI()
    {
        GameObject[] objs = GameObject.FindGameObjectsWithTag("enemy_soldier");
        if (objs.Length <= 0)
        {
            return;
        }

        if (bool_for_high)
        {
            // Note: this projects through the main camera only; the scope camera is never used here.
            Vector2 pos_to_display = Camera.main.WorldToScreenPoint(this.transform.position);
            GUI.DrawTexture(new Rect(pos_to_display.x - 10.0f, (Screen.height - pos_to_display.y) - 40.0f, 15, 15), tex);
        }
    }

    public void show_your_identity()
    {
        if (target_is_high)
        {
            if (counter_for_high >= counter_value)
            {
                return;
            }
            counter_for_high = counter_for_high + 1;
            if (counter_for_high >= counter_value)
            {
                bool_for_high = true;
            }
        }
        else if (!target_is_high)
        {
            if (counter_for_low >= counter_value)
            {
                return;
            }
            counter_for_low = counter_for_low + 1;
            if (counter_for_low >= counter_value)
            {
                bool_for_low = true;
            }
        }
    }
}
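For reference, a minimal sketch of the usual fix: project the enemy through the scope camera rather than Camera.main, and remap the result into the on-screen rectangle where the scope's render texture is displayed. The names scopeCam and scopeRect below are illustrative and not part of the original script:

// Hypothetical sketch only: scopeCam is the scope camera, scopeRect is the screen-space
// rectangle (in pixels, bottom-left origin) where its render texture is drawn.
Vector3 vp = scopeCam.WorldToViewportPoint(transform.position);
if (vp.z > 0f) // only draw targets in front of the scope camera
{
    float x = scopeRect.x + vp.x * scopeRect.width;
    float y = scopeRect.y + vp.y * scopeRect.height;
    GUI.DrawTexture(new Rect(x - 7.5f, Screen.height - y - 7.5f, 15, 15), tex);
}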

Related

Unity renders object before updating Rigidbody2D's properties

I am trying to implement a way for the player to switch between a square and a circle. For this to work my player object has two colliders, one circle and one box. When switching between them I simply disable one collider and enable the other, and switch the current sprite. The issue arises when I switch from a circle to a square.
I want the square to be able to glide across the floor, whereas the circle is supposed to roll. In order to make the switch seamless I have to reorient the square so it is aligned with the current velocity, and remove the angular velocity. This does seem to work; however, there is a brief period of one or more frames where the square still has the rotation the circle had before switching. This seems odd to me since the new rotation and sprite are changed in the same part of the code. This is a video showcasing the issue.
If this is an issue resulting from the way the objects are rendered I can solve this another way. I would just like to understand why it happens.
Code snippet of the part that changes the properties from circle to square when switching:
else if (Input.GetKeyDown("2"))
{
    // Update rotation of box to velocity:
    float newAngleRadians = Mathf.Atan2(rb.velocity.y, rb.velocity.x);
    float newAngleDegrees = newAngleRadians * 180 / Mathf.PI;
    rb.rotation = newAngleDegrees;
    rb.angularVelocity = 0;
    Debug.Log(rb.rotation);

    playerShape = Shape.SQUARE;
    spriteRenderer.sprite = spriteArray[1];
    circleCollider.enabled = false;
    boxCollider.enabled = true;
    updateShape = true;
}
Logging the angle of the rigidbody directly after setting it to newAngleDegrees shows that the rotation has been set correctly, yet the issue persists.
And just in case it is needed, full code of the script:
using System.Collections;
using System.Collections.Generic;
using UnityEngine;

public class scr_StateMachine : MonoBehaviour
{
    // Types of shapes:
    public enum Shape { SQUARE, CIRCLE, TRIANGLE }

    // Variables:
    public Rigidbody2D rb;
    public SpriteRenderer spriteRenderer;
    public Sprite[] spriteArray;
    public Shape playerShape;
    public CircleCollider2D circleCollider;
    public BoxCollider2D boxCollider;
    private bool updateShape;

    void Start()
    {
        playerShape = Shape.CIRCLE;
        updateShape = true;
    }

    void Update()
    {
        // Get input for shape change:
        if (Input.GetKeyDown("1"))
        {
            playerShape = Shape.CIRCLE;
            spriteRenderer.sprite = spriteArray[0];
            circleCollider.enabled = true;
            boxCollider.enabled = false;
            updateShape = true;
        }
        else if (Input.GetKeyDown("2"))
        {
            // Update rotation of box to velocity:
            float newAngleRadians = Mathf.Atan2(rb.velocity.y, rb.velocity.x);
            float newAngleDegrees = newAngleRadians * 180 / Mathf.PI;
            rb.rotation = newAngleDegrees;
            rb.angularVelocity = 0;
            Debug.Log(rb.rotation);

            playerShape = Shape.SQUARE;
            spriteRenderer.sprite = spriteArray[1];
            circleCollider.enabled = false;
            boxCollider.enabled = true;
            updateShape = true;
        }

        // Update movement script's shape:
        if (updateShape)
        {
            GetComponent<scr_Movement>().shape = playerShape;
            updateShape = false;
        }
    }
}
I think the issue is that Rigidbody/Rigidbody2D physics is applied in fixed time steps (see FixedUpdate). Therefore yes, on a fast device it can definitely happen that a few frames are rendered before the next FixedUpdate call kicks in and changes the Rigidbody/Rigidbody2D behaviour.
What you could do is delay the change until the next FixedUpdate call, e.g. using a Coroutine and WaitForFixedUpdate, like this:
using System.Collections;
using UnityEngine;

public class scr_StateMachine : MonoBehaviour
{
    // Types of shapes:
    public enum Shape { SQUARE, CIRCLE, TRIANGLE }

    // Variables:
    public Rigidbody2D rb;
    public SpriteRenderer spriteRenderer;
    public Sprite[] spriteArray;
    public Shape playerShape;
    public CircleCollider2D circleCollider;
    public BoxCollider2D boxCollider;

    [SerializeField] private scr_Movement _scrMovement;

    void Start()
    {
        if (!_scrMovement) _scrMovement = GetComponent<scr_Movement>();

        ChangeShape(Shape.CIRCLE);
    }

    // Still get user input in Update
    void Update()
    {
        // Get input for shape change:
        if (Input.GetKeyDown("1"))
        {
            ChangeShape(Shape.CIRCLE);
        }
        else if (Input.GetKeyDown("2"))
        {
            ChangeShape(Shape.SQUARE);
        }
    }

    private void ChangeShape(Shape newShape)
    {
        // If the same shape comes in we already have -> nothing to do
        if (newShape == playerShape) return;

        // Stop any already running coroutine since we only want to handle the last input before the FixedUpdate call
        // https://docs.unity3d.com/ScriptReference/MonoBehaviour.StopAllCoroutines.html
        StopAllCoroutines();

        // Start a new coroutine for the new shape
        // see https://docs.unity3d.com/ScriptReference/MonoBehaviour.StartCoroutine.html
        StartCoroutine(ChangeShapeInNextFixedUpdate(newShape));
    }

    private IEnumerator ChangeShapeInNextFixedUpdate(Shape newShape)
    {
        // Just in case again, if the same shape comes in we already have -> nothing to do
        if (newShape == playerShape) yield break;

        // Wait until the next FixedUpdate call
        // see https://docs.unity3d.com/ScriptReference/WaitForFixedUpdate.html
        yield return new WaitForFixedUpdate();

        // Now do your required changes depending on the new shape
        circleCollider.enabled = newShape == Shape.CIRCLE;
        boxCollider.enabled = newShape == Shape.SQUARE;

        switch (newShape)
        {
            case Shape.CIRCLE:
                spriteRenderer.sprite = spriteArray[0];
                break;

            case Shape.SQUARE:
                // Update rotation of box to velocity:
                var newAngleRadians = Mathf.Atan2(rb.velocity.y, rb.velocity.x);
                // see https://docs.unity3d.com/ScriptReference/Mathf.Rad2Deg.html
                var newAngleDegrees = newAngleRadians * Mathf.Rad2Deg;
                rb.rotation = newAngleDegrees;
                rb.angularVelocity = 0;
                Debug.Log(rb.rotation);

                spriteRenderer.sprite = spriteArray[1];
                break;
        }

        playerShape = newShape;
        _scrMovement.shape = playerShape;
    }
}

Unity Game Object Controlled by real light spot on the wall

We have various controllers developed for moving game objects. I have used the magnetometer/gyro sensor to move a game object using MUVSlide, through the following code:
using UnityEngine;
using ForestIndieGames.Muvslide;

public class Connection : MonoBehaviour {

    private static bool created = false;

    private bool newInputAvailable;
    private MuvslideConnection muvslideConn;

    private void Awake()
    {
        if (!created)
        {
            DontDestroyOnLoad(this.gameObject);
            created = true;
            muvslideConn = new MuvslideConnection();
        }
    }

    private void OnApplicationQuit()
    {
        if (muvslideConn != null)
            muvslideConn.Close();
    }

    private void Update()
    {
        if (muvslideConn.GetInputManager().IsNewMotionAvailable())
            newInputAvailable = true;
    }

    public bool IsNewInputAvailable()
    {
        bool result = newInputAvailable;
        newInputAvailable = false;
        return result;
    }

    public Vector3 GetAngles()
    {
        float[] angles = muvslideConn.GetInputManager().GetOrientationDegrees();
        return new Vector3(angles[0], angles[1], angles[2]);
    }
}
What I am trying to achieve is to move a game object with a real light spot on a wall. The spot is on the wall and is fed into the game through a camera. When the spot moves, I want the game object to follow it exactly. The light spot could be a specific colour, IR, UV, etc.
Any leads please.
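One possible starting point (only a sketch, not a tested solution): read the camera feed through a WebCamTexture, find the brightest pixel each frame, and map that position into the game view. All names below (LightSpotFollower, target, minBrightness) are illustrative:

using UnityEngine;

public class LightSpotFollower : MonoBehaviour
{
    public Transform target;            // object that should follow the spot
    public float minBrightness = 0.9f;  // ignore frames without a clearly bright spot

    private WebCamTexture cam;

    void Start()
    {
        cam = new WebCamTexture();
        cam.Play();
    }

    void Update()
    {
        if (!cam.didUpdateThisFrame) return;

        // Find the brightest pixel in the current frame.
        Color32[] pixels = cam.GetPixels32();
        int bestIndex = -1;
        float bestSum = minBrightness * 3f * 255f;
        for (int i = 0; i < pixels.Length; i++)
        {
            float sum = pixels[i].r + pixels[i].g + pixels[i].b;
            if (sum > bestSum) { bestSum = sum; bestIndex = i; }
        }
        if (bestIndex < 0) return;

        // Convert the pixel index into normalized (0..1) coordinates of the camera image...
        float u = (bestIndex % cam.width) / (float)cam.width;
        float v = (bestIndex / cam.width) / (float)cam.height;

        // ...and map that onto the game view at a fixed distance in front of the main camera.
        target.position = Camera.main.ViewportToWorldPoint(new Vector3(u, v, 10f));
    }
}

In practice you would also need to calibrate the camera image against the projected game area (the wall), and filter by colour if the spot is IR/UV or a specific colour, but a brightest-pixel search like the one above is the core idea.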

Need to disable Particle System as soon as it stops playing

I am trying to implement a flame thrower. I have set up the Particle System for the flame. It is a pooled object and is not a child of the flame thrower. When I press the Fire button the particle system starts, and when the button is released it stops. But a problem arose: when the player is moving, the particle system doesn't move with them. This was solved by adding the line below
particles.transform.position = transform.GetChild(0).position;
But then I discovered another problem: when rotating the player (this is a 2D side-scroller game), the existing particles rotate with it instantly. So when the player changes direction, the current particle system is stopped and a new one is activated and played. But now the problem is that whenever I change direction while holding the Fire button, new objects keep being created.
The code of my flame thrower is
using UnityEngine;

public class FlameThrower : Gun
{
    private ParticleSystem particles;
    private int direction = 0;
    private bool isFiring = false;

    public override void Update()
    {
        if (shoot)
        {
            InitShoot();
        }
        else
        {
            StopFire();
        }
    }

    public override void InitShoot()
    {
        if (!isFiring)
        {
            SelectDirection();
            Fire();
        }

        //Check direction has changed
        if (direction != playerManager.direction)
        {
            StopFire();
        }

        if (particles != null)
        {
            particles.transform.position = transform.GetChild(0).position;
        }
    }

    public override void Fire()
    {
        isFiring = true;
        direction = playerManager.direction;
        InstantiateParticles(weapon.bulletName, transform.GetChild(0).position, rotation);
    }

    public override void SelectDirection()
    {
        if (playerManager.direction == 1)
        {
            rotation = Quaternion.Euler(transform.rotation.x, transform.rotation.y, -90);
        }
        else if (playerManager.direction == -1)
        {
            rotation = Quaternion.Euler(transform.rotation.x, transform.rotation.y, 90);
        }
    }

    public override void InstantiateParticles(string name, Vector3 position, Quaternion rotation)
    {
        GameObject bullet = ObjectPooler.instance.GetObject(name);
        while (bullet == null)
        {
            ObjectPooler.instance.CreateObject(name);
            bullet = ObjectPooler.instance.GetObject(name);
        }

        if (bullet != null)
        {
            particles = bullet.GetComponent<ParticleSystem>();
            //Set position and rotation
            bullet.transform.position = position;
            bullet.transform.rotation = rotation;
            bullet.SetActive(true);
            particles.Play();
        }
    }

    private void StopFire()
    {
        if (particles != null)
        {
            isFiring = false;
            particles.Stop();
            if (!particles.isPlaying)
            {
                particles.gameObject.SetActive(false);
                particles = null;
            }
        }
    }
}
The problem is that the StopFire() function checks whether the particle system is still playing and, if it is not, disables the GameObject. That part never executes, though, because the check runs immediately after the system is stopped, at which point it is still playing. I want this particle system to be disabled as soon as it stops playing.
It seems that the particle system is simulating in local space; that's why it rotates with the object. Change the Simulation Space to World.
For the other issue: if you disable the game object, the particles that have already been emitted will disappear. And if you deactivate it with a delay (using Invoke or similar), new particles can still be emitted within that delay.
The best way to handle this is to stop the emission rather than deactivating the game object. You can do this in the Inspector or via code through the emission module (myParticles.emission.enabled).
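As a rough sketch of that idea (assuming the particles and isFiring fields from the FlameThrower above, plus using System.Collections for the coroutine), you could stop emitting and only hand the object back to the pool once every live particle has died:

private IEnumerator StopFireAndRecycle()
{
    isFiring = false;

    // The emission module is a struct wrapper around the system, so changes apply directly.
    var emission = particles.emission;
    emission.enabled = false;   // no new particles; existing ones keep simulating

    // Wait until all already emitted particles are gone before disabling the pooled object.
    yield return new WaitUntil(() => !particles.IsAlive(true));

    particles.gameObject.SetActive(false);
    particles = null;
}

You would start it with StartCoroutine(StopFireAndRecycle()) where StopFire() is called now; calling particles.Stop(true, ParticleSystemStopBehavior.StopEmitting) instead of toggling the emission module has the same effect. Independently of that, the script below shows how to play, pause and fully stop a particle system from code: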
using UnityEngine;

public class ParticleSystemControllerWindow : MonoBehaviour
{
    ParticleSystem system
    {
        get
        {
            if (_CachedSystem == null)
                _CachedSystem = GetComponent<ParticleSystem>();
            return _CachedSystem;
        }
    }
    private ParticleSystem _CachedSystem;

    public Rect windowRect = new Rect(0, 0, 300, 120);

    public bool includeChildren = true;

    void OnGUI()
    {
        windowRect = GUI.Window("ParticleController".GetHashCode(), windowRect, DrawWindowContents, system.name);
    }

    void DrawWindowContents(int windowId)
    {
        if (system)
        {
            GUILayout.BeginHorizontal();
            GUILayout.Toggle(system.isPlaying, "Playing");
            GUILayout.Toggle(system.isEmitting, "Emitting");
            GUILayout.Toggle(system.isPaused, "Paused");
            GUILayout.EndHorizontal();

            GUILayout.BeginHorizontal();
            if (GUILayout.Button("Play"))
                system.Play(includeChildren);
            if (GUILayout.Button("Pause"))
                system.Pause(includeChildren);
            if (GUILayout.Button("Stop Emitting"))
                system.Stop(includeChildren, ParticleSystemStopBehavior.StopEmitting);
            if (GUILayout.Button("Stop & Clear"))
                system.Stop(includeChildren, ParticleSystemStopBehavior.StopEmittingAndClear);
            GUILayout.EndHorizontal();

            includeChildren = GUILayout.Toggle(includeChildren, "Include Children");

            GUILayout.BeginHorizontal();
            GUILayout.Label("Time(" + system.time + ")");
            GUILayout.Label("Particle Count(" + system.particleCount + ")");
            GUILayout.EndHorizontal();
        }
        else
            GUILayout.Label("No particle system found");

        GUI.DragWindow();
    }
}
This might help you with playing, pausing and fully stopping your particle system from emitting particles.
Hope I helped.

How to create your own button in Unity to control the player on a mobile device

I am not sure how to do this. My goal is to create a button that will control the player on the plane for mobile touch devices. I created the button and am trying to make it control the player, but I am failing. Here is my player C# script:
using UnityEngine;
using System.Collections;

public class PlayerMovement : MonoBehaviour
{
    public float speed = 500f, jumpHeight = 500f;

    Transform myTrans;
    Rigidbody2D myBody;

    Vector2 movement; //used as temp holder variable before applying to velocity

    bool isGrounded = true;

    void Start ()
    {
        myTrans = this.transform;
        myBody = this.GetComponent<Rigidbody2D>();
    }

    void Update ()
    {
        //Check if the player is grounded or not
        isGrounded = Physics2D.Linecast(myTrans.position,
            GameObject.Find(this.name + "/tag_ground").transform.position,
            LayerMask.NameToLayer("Player"));

        //Just for visualization, not needed for gameplay
        Debug.DrawLine (myTrans.position,
            GameObject.Find(this.name + "/tag_ground").transform.position,
            Color.red);

        //Works for keyboards and joysticks
#if !UNITY_ANDROID && !UNITY_IPHONE && !UNITY_BLACKBERRY && !UNITY_WINRT
        Move (Input.GetAxisRaw("Horizontal"));
        if (Input.GetButtonDown ("Jump"))
            Jump ();
#endif
    }

    //
    //Separate Move and Jump so they can be called externally by TouchButtons
    //
    public void Move(float horizontal_input)
    {
        movement = myBody.velocity;
        if (isGrounded) //we can only move left and right if on the ground
            movement.x = horizontal_input * speed * Time.deltaTime;

        //Apply the movement to the player
        myBody.velocity = movement;
    }

    public void Jump() //we can only jump if on the ground
    {
        if (isGrounded)
            myBody.velocity += jumpHeight * Vector2.up * Time.deltaTime;
    }
}
Here is the touch logic script:
using UnityEngine;
using System.Collections;

public class TouchLogicV2 : MonoBehaviour
{
    public static int currTouch = 0; //so other scripts can know what touch is currently on screen
    [HideInInspector]
    public int touch2Watch = 64;
    private new GUITexture guiTexture;

    public virtual void Update() //If your child class uses Update, you must call base.Update(); to get this functionality
    {
        //is there a touch on screen?
        if (Input.touches.Length <= 0)
        {
            OnNoTouches();
        }
        else //if there is a touch
        {
            //loop through all the touches on screen
            for (int i = 0; i < Input.touchCount; i++)
            {
                currTouch = i;

                //executes this code for current touch (i) on screen
                if (this.guiTexture != null && (this.guiTexture.HitTest(Input.GetTouch(i).position)))
                {
                    //if current touch hits our guitexture, run this code
                    if (Input.GetTouch(i).phase == TouchPhase.Began)
                    {
                        OnTouchBegan();
                        touch2Watch = currTouch;
                    }
                    if (Input.GetTouch(i).phase == TouchPhase.Ended)
                    {
                        OnTouchEnded();
                    }
                    if (Input.GetTouch(i).phase == TouchPhase.Moved)
                    {
                        OnTouchMoved();
                    }
                    if (Input.GetTouch(i).phase == TouchPhase.Stationary)
                    {
                        OnTouchStayed();
                    }
                }

                //outside so it doesn't require the touch to be over the guitexture
                switch (Input.GetTouch(i).phase)
                {
                    case TouchPhase.Began:
                        OnTouchBeganAnywhere();
                        break;
                    case TouchPhase.Ended:
                        OnTouchEndedAnywhere();
                        break;
                    case TouchPhase.Moved:
                        OnTouchMovedAnywhere();
                        break;
                    case TouchPhase.Stationary:
                        OnTouchStayedAnywhere();
                        break;
                }
            }
        }
    }

    //the default functions, define what will happen if the child doesn't override these functions
    public virtual void OnNoTouches() { }
    public virtual void OnTouchBegan() { print(name + " is not using OnTouchBegan"); }
    public virtual void OnTouchEnded() { }
    public virtual void OnTouchMoved() { }
    public virtual void OnTouchStayed() { }
    public virtual void OnTouchBeganAnywhere() { }
    public virtual void OnTouchEndedAnywhere() { }
    public virtual void OnTouchMovedAnywhere() { }
    public virtual void OnTouchStayedAnywhere() { }
}
I am following the TouchLogic tutorials but am struggling to move the player on mobile. Anyone with good examples of touch input is welcome to give me links to them. Thanks.
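For what it's worth, a minimal, untested sketch of how the two scripts above are usually wired together: a small subclass of TouchLogicV2 sits on each on-screen button and forwards touches to PlayerMovement. The class name MoveButton and its fields are illustrative:

using UnityEngine;

public class MoveButton : TouchLogicV2
{
    public PlayerMovement player;  // drag the player object here in the Inspector
    public float direction = 1f;   // -1 for a "left" button, 1 for a "right" button

    public override void Update()
    {
        base.Update(); // required so TouchLogicV2 keeps dispatching the touch events
    }

    // While a finger is on the button, keep feeding horizontal input to the player.
    public override void OnTouchStayed() { player.Move(direction); }
    public override void OnTouchMoved()  { player.Move(direction); }

    // When the finger lifts, stop the horizontal movement.
    public override void OnTouchEnded()  { player.Move(0f); }
}

A jump button would work the same way, overriding OnTouchBegan and calling player.Jump() instead. Note that the private guiTexture field in TouchLogicV2 must actually reference the button's GUITexture, otherwise the HitTest check never succeeds and none of the button callbacks will fire.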

Create a dark-to-light effect when the game starts in Unity

I am new to Unity and I need to create an effect when my game starts:
While the game is starting, the scene is dark.
Once the game has started, the scene is light.
Please teach me how I can do this. Thanks so much.
First you have to add a Light to your scene from the GameObject menu in Unity. If you need to turn that light on and off, just add this JavaScript to your light. There are lots of ways to do this; Google it and they are easy to find.
var times : float;

function Start () {
    light.intensity = 0;
}

function Update () {
    times += Time.deltaTime; // accumulate elapsed time
    if (times > 5) // or react to a key press event here instead
        light.intensity = 8;
}
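Since the other scripts here are C# (and UnityScript is no longer supported), an equivalent C# sketch, assuming the script is attached to the Light object, might look like this (LightFadeIn is an illustrative name):

using UnityEngine;

public class LightFadeIn : MonoBehaviour
{
    private Light sceneLight;
    private float elapsed;

    void Start()
    {
        sceneLight = GetComponent<Light>();
        sceneLight.intensity = 0f; // start dark
    }

    void Update()
    {
        elapsed += Time.deltaTime;
        if (elapsed > 5f)          // or react to a key press / game-start event instead
            sceneLight.intensity = 8f;
    }
}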
Here's what I use. I create a 1x1 pixel texture and draw it with the GUI system to ensure it is rendered in front of everything in the scene. I actually use a display manager to control the order GUI elements are drawn in, to make sure the overlay is in front of the other GUI elements.
using UnityEngine;
using System.Collections;

public class Overlay : MonoBehaviour {
    //----------------------------------------------------------------------------------------------------//
    [SerializeField]
    private Color screenColor = Color.black;
    [SerializeField]
    private float fadeSpeed = 0.5f;
    //----------------------------------------------------------------------------------------------------//
    static private Texture2D overlay;
    static private bool fadeOverlay;
    static private bool fadeOverlayIn;
    //----------------------------------------------------------------------------------------------------//

    public void Awake() {
        //Setup the values for the fading overlay.
        overlay = new Texture2D( 1, 1 );
        overlay.SetPixel( 0, 0, screenColor );
        overlay.Apply();

        fadeOverlay = true;
        fadeOverlayIn = false;
    }

    public void Update() {
        if ( fadeOverlay ) {
            if ( fadeOverlayIn ) {
                screenColor.a += Time.deltaTime * fadeSpeed;
                if ( screenColor.a >= 1 ) {
                    fadeOverlay = false;
                }
            } else {
                screenColor.a -= Time.deltaTime * fadeSpeed;
                if ( screenColor.a <= 0 ) {
                    fadeOverlay = false;
                }
            }

            overlay.SetPixel( 0, 0, screenColor );
            overlay.Apply();
        }
    }

    public void OnGUI() {
        if ( fadeOverlayIn || fadeOverlay ) {
            GUI.DrawTexture( Camera.main.pixelRect, overlay );
        }
    }

    static public void Toggle() {
        fadeOverlay = true;
        fadeOverlayIn = !fadeOverlayIn;
    }
    //----------------------------------------------------------------------------------------------------//
}