Poor screen share using Agora Virtual Camera Prefab from Oculus - unity3d

I'm currently working on a Unity project where I'm trying to screen-share my content from an Oculus Quest 2 to a remote PC, with audio chat enabled, using the Agora Virtual Camera Prefab package. I followed this blog for the implementation: https://www.agora.io/en/blog/how-to-build-a-vr-video-chat-app-using-unitys-xr-framework/. I implemented it successfully and connected with a remote PC, but all I'm getting on the remote PC is a very poor screen share of the Oculus (most of the time a black screen). The audio is better, with only 1 or 2 seconds of lag, and I'm even able to see the remote PC user's cam video inside my game through the Oculus. It would be a great help if anyone could assist with this issue; I'm totally stuck. I'm also attaching a screenshot of the Agora Virtual Camera prefab's config and the code used, for any suggestions.
using System.Collections;
using agora_gaming_rtc;
using UnityEngine;
using UnityEngine.UI;
using static agora_gaming_rtc.ExternalVideoFrame;
using agora_utilities;
using System.Collections.Generic;
#if (UNITY_2018_3_OR_NEWER && UNITY_ANDROID)
using UnityEngine.Android;
#endif
public class AgoraVirtualCamera : MonoBehaviour
{
// Use this for initialization
#if (UNITY_2018_3_OR_NEWER && UNITY_ANDROID)
private ArrayList permissionList = new ArrayList();
#endif
// PLEASE KEEP THIS App ID IN SAFE PLACE
// Get your own App ID at https://dashboard.agora.io/
[Header("Agora Config")]
[SerializeField]
private string AppID = "";
[SerializeField]
private string TempToken = "";
[SerializeField]
private string TokenServerURL = "";
[SerializeField]
private string ChannelName = "";
[Header("Env Config")]
[SerializeField]
private Camera VirtualCam;
[SerializeField]
private GameObject RemoteVideoRoot;
[SerializeField]
private GameObject RemoteScreenVideoRoot;
/*[SerializeField]
private int ScreenShareUID;*/
[SerializeField]
private Text LogText;
[Header("UI Btn Config")]
public GameObject JoinBtn;
public GameObject LeaveBtn;
public GameObject MicBtn;
public GameObject QuitBtn;
public Color ActiveMicColor = Color.green;
public Color DisabledMicColor = Color.red;
[Header("Video Encoder Config")]
[SerializeField]
private VideoDimensions dimensions = new VideoDimensions
{
width = 1280,
height = 720
};
[SerializeField]
private int bitrate = 1130;
[SerializeField]
private FRAME_RATE frameRate = FRAME_RATE.FRAME_RATE_FPS_30;
[SerializeField]
private VIDEO_MIRROR_MODE_TYPE mirrorMode = VIDEO_MIRROR_MODE_TYPE.VIDEO_MIRROR_MODE_DISABLED;
// use bitrate: 2260 for broadcast mode
// Pixel format
public static TextureFormat ConvertFormat = TextureFormat.RGBA32;
public static VIDEO_PIXEL_FORMAT PixelFormat = VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_RGBA;
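// NOTE: ConvertFormat (RGBA32) and PixelFormat (VIDEO_PIXEL_RGBA) must describe the same 4-bytes-per-pixel layout so the raw bytes read from BufferTexture are interpreted correctly by the engine.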
private static int ShareCameraMode = 1; // 0 = unsafe buffer pointer, 1 = renderer image
// used for setting frame order
int timeStampCount = 0; // monotonic timestamp counter
// perspective camera buffer
private Texture2D BufferTexture;
// output log
private Logger logger;
// uid
private uint UID = 0; // 0 tells the agora engine to generate the uid
// reference to the active agora client
static AgoraInterface client = null;
// keep track of remote UID
Dictionary<string, List<uint>> RemoteUIDs = new Dictionary<string, List<uint>>();
// keep track of channel state
bool InChannel = false;
#region --- Life Cycles ---
void Awake()
{
// keep this alive across scenes
//DontDestroyOnLoad(this.gameObject);
}
// Start is called before the first frame update
void Start()
{
CheckAppId();// ensure an AppID is defined
// if there isn't a join button defined, autojoin
if (JoinBtn == null || !JoinBtn.activeInHierarchy)
{
JoinChannel();
}
}
// Update is called once per frame
void Update()
{
PermissionHelper.RequestMicrophontPermission();
PermissionHelper.RequestCameraPermission();
}
void OnDisable()
{
LeaveChannel();
}
void OnApplicationPause(bool paused)
{
if (client != null)
{
client.EnableVideo(paused);
client.EnableAudio(paused);
}
}
void OnApplicationQuit()
{
ShareCameraMode = 0; // ends the CoShareRenderData() capture loop
if (client != null)
{
client.Leave();
client.UnloadEngine();
}
}
#endregion
#region --- Agora Functions ---
void ReloadAgoraEngine()
{
client = GetComponent<AgoraInterface>();
if (client != null)
{
client.Leave();
client.UnloadEngine();
Destroy(client);
client = null;
}
client = gameObject.AddComponent<AgoraInterface>();
client.SetLogger(logger);
// video config
VideoEncoderConfiguration videoEncodeConfig = new VideoEncoderConfiguration
{
dimensions = this.dimensions,
frameRate = this.frameRate,
bitrate = this.bitrate,
orientationMode = ORIENTATION_MODE.ORIENTATION_MODE_FIXED_LANDSCAPE,
mirrorMode = this.mirrorMode
};
client.SetVideoEncoderConfig(videoEncodeConfig);
}
// agora functions
public void JoinChannel()
{
// clean up and create a new one
ReloadAgoraEngine();
string appidMSG = string.Format("Initializing client with appid: {0}", AppID);
logger.UpdateLog(appidMSG);
client.LoadEngine(AppID); // load engine
// Set up the texture for rendering POV as a texture
if (VirtualCam.isActiveAndEnabled)
{
logger.UpdateLog("Virtual Camera is Active and Enabled, Enable custom video source");
client.CustomVideo = true;
int width = Screen.width;
int height = Screen.height;
}
AddCallbackEvents(); // add custom event handling
if (TokenServerURL != "")
{
client.JoinWithTokenServer(ChannelName, UID, TokenServerURL);
}
else
{
// join with or without a token
client.Join(ChannelName, TempToken, UID);
string joiningChannelMsg = string.Format("Joining channel: {0}, with uid: {1}", ChannelName, UID);
logger.UpdateLog(joiningChannelMsg);
}
}
public void LeaveChannel()
{
if (client != null)
{
client.Leave();
}
DisableSharing();
InChannel = false;
// change mic button text and color - help user visualize they left the channel
if (MicBtn != null)
{
MicBtn.GetComponentInChildren<Text>().text = "MIC";
MicBtn.GetComponent<Image>().color = Color.white;
}
// remove the remote video planes
if (gameObject.activeInHierarchy)
{
if (RemoteVideoRoot?.transform.childCount > 0)
{
foreach (Transform child in RemoteVideoRoot.transform)
{
GameObject.Destroy(child.gameObject);
}
StartCoroutine(UiUpdate(0.5f));
}
}
}
public void ToggleMic()
{
if (!InChannel)
return; // only toggle mic when in a channel
Text MicBtnText = MicBtn.GetComponentInChildren<Text>();
Image micBtnImg = MicBtn.GetComponent<Image>();
if (micBtnImg.color == ActiveMicColor) // compare against the configured colors, not hard-coded ones
{
client.MuteLocalAudioStream(true);
MicBtnText.text = "Mic OFF";
micBtnImg.color = DisabledMicColor;
}
else if (micBtnImg.color == DisabledMicColor)
{
client.MuteLocalAudioStream(false);
MicBtnText.text = "Mic ON";
micBtnImg.color = ActiveMicColor;
}
else
{
client.MuteLocalAudioStream(true); // mute by default
MicBtnText.text = "- MUTED -";
MicBtnText.color = Color.white;
micBtnImg.color = DisabledMicColor;
}
}
// Called by quit button
public void ExitApp()
{
#if UNITY_EDITOR
// Application.Quit() does not work in the editor so
// UnityEditor.EditorApplication.isPlaying need to be set to false to end the game
UnityEditor.EditorApplication.isPlaying = false;
#else
Application.Quit();
#endif
}
#endregion
#region --- Callback handlers ---
protected virtual void AddCallbackEvents()
{
IRtcEngine mRtcEngine = IRtcEngine.QueryEngine();
mRtcEngine.OnJoinChannelSuccess += OnJoinChannelSuccess;
mRtcEngine.OnUserJoined += OnUserJoined;
mRtcEngine.OnUserOffline += OnUserOffline;
}
public void OnJoinChannelSuccess(string channelName, uint uid, int elapsed)
{
InChannel = true;
if (VirtualCam != null && VirtualCam.isActiveAndEnabled)
{
logger.UpdateLog("Enable Virtual Camera Sharing");
EnableVirtualCameraSharing();
}
else
{
logger.UpdateLog("ERROR: Failed to find perspective camera.");
}
// update mic button color and text - visually show joined channel
if (MicBtn != null)
{
MicBtn.GetComponentInChildren<Text>().text = "MIC ON";
MicBtn.GetComponent<Image>().color = ActiveMicColor;
}
// enable dual stream mode
IRtcEngine mRtcEngine = IRtcEngine.QueryEngine();
mRtcEngine.EnableDualStreamMode(true);
}
public void OnUserJoined(uint uid, int elapsed)
{
// add video streams from all users in the channel
// offset the new video plane based on the parent's number of children.
//float xOffset = RemoteVideoRoot.transform.childCount * 3.5f;
//MakeVideoView(uid, RemoteVideoRoot, new Vector3(xOffset, 0, 0), Quaternion.Euler(270, 0, 0));
// to restrict which user video streams appear
// only show users with uid 1000-1009 or 49024 (screen share)
// uid 49024 is an arbitrary number that was selected and hardcoded as the uid for the screen share stream in the web demo code. This uid can be customized
string remoteUIDtype;
if (uid >= 1000 && uid <= 1009)
{
// offset the new video plane based on the parent's number of children.
float xOffset = RemoteVideoRoot.transform.childCount * -3.69f;
MakeVideoView(uid, RemoteVideoRoot, new Vector3(xOffset, 0, 0), Quaternion.Euler(270, 180, 0), new Vector3(1.0f, 1.0f, 0.5625f));
remoteUIDtype = "admin";
} else if (uid == 49024 && RemoteScreenVideoRoot != null)
{
MakeVideoView(uid, RemoteScreenVideoRoot, new Vector3(0, 0, 0), Quaternion.Euler(270, 0, 0), new Vector3(-1.777f,-1.0f, -1.0f));
remoteUIDtype = "screen";
}
else
{
IRtcEngine mRtcEngine = IRtcEngine.QueryEngine();
// unsubscribe from video & audio streams
mRtcEngine.MuteRemoteVideoStream(uid, true);
mRtcEngine.MuteRemoteAudioStream(uid, true);
remoteUIDtype = "peer";
}
// keep track of the remote uids
logger.UpdateLog($"Make Remote Video UID type:{remoteUIDtype}");
if (RemoteUIDs.ContainsKey(remoteUIDtype))
{
RemoteUIDs[remoteUIDtype].Add(uid);
} else {
RemoteUIDs.Add(remoteUIDtype, new List<uint> { uid });
}
}
public void OnUserOffline(uint uid, USER_OFFLINE_REASON reason)
{
logger.UpdateLog("onUserOffline: update UI");
// update the position of the remaining children
StartCoroutine(UiUpdate(0.5f));
}
#endregion
#region --- misc helper functions ---
public void SetResolution(VideoDimensions newDimensions, int newBitrate)
{
dimensions = newDimensions;
bitrate = newBitrate;
VideoEncoderConfiguration videoEncodeConfig = new VideoEncoderConfiguration
{
dimensions = this.dimensions,
frameRate = this.frameRate,
bitrate = this.bitrate,
orientationMode = ORIENTATION_MODE.ORIENTATION_MODE_FIXED_LANDSCAPE,
mirrorMode = this.mirrorMode
};
client.SetVideoEncoderConfig(videoEncodeConfig);
}
private void CheckAppId()
{
logger = new Logger(LogText);
logger.DebugAssert(AppID.Length > 10, "Please fill in your AppId"); // Checks that AppID is set.
}
private void MakeVideoView(uint uid, GameObject parentNode, Vector3 position, Quaternion rotation, Vector3 scale)
{
logger.UpdateLog(string.Format("Make Remote Video View for UID: {0}.", uid));
GameObject go = GameObject.Find(uid.ToString());
if (go != null)
{
return; // reuse
}
// create a GameObject and assign to this new user
VideoSurface videoSurface = makePlaneSurface(uid.ToString(), parentNode, position, rotation, scale);
if (videoSurface != null)
{
// configure videoSurface
videoSurface.SetForUser(uid);
videoSurface.SetEnable(true);
videoSurface.SetVideoSurfaceType(AgoraVideoSurfaceType.Renderer);
videoSurface.SetGameFps(30);
}
}
// VIDEO TYPE 1: 3D Object
public VideoSurface makePlaneSurface(string goName, GameObject parentNode, Vector3 position, Quaternion rotation, Vector3 scale)
{
GameObject go = GameObject.CreatePrimitive(PrimitiveType.Plane);
if (go == null)
{
return null;
}
go.name = goName;
go.transform.localScale = scale; // scale the video plane (e.g. 0.5625 on one axis gives 16:9)
if (parentNode != null)
{
go.transform.parent = parentNode.transform;
go.transform.localPosition = position;
go.transform.localRotation = rotation;
Debug.Log("add video view");
}
else
{
Debug.Log("parentNode is null video view");
go.transform.localPosition = new Vector3(0, 0, 0f);
go.transform.localRotation = Quaternion.Euler(270, 0, 0);
}
// configure videoSurface
VideoSurface videoSurface = go.AddComponent<VideoSurface>();
return videoSurface;
}
IEnumerator UiUpdate(float time)
{
yield return new WaitForSeconds(time);
// update the UI
for (int i = 0; i < RemoteVideoRoot.transform.childCount; i++)
{
float xOffset = -1 * i * 3.69f; // calculate the new position
RemoteVideoRoot.transform.GetChild(i).localPosition = new Vector3(xOffset, 0, 0); // update the position
}
}
#endregion
#region --- Virtual Camera video frame sharing ---
void EnableVirtualCameraSharing()
{
RenderTexture renderTexture = VirtualCam.targetTexture;
if (renderTexture != null)
{
BufferTexture = new Texture2D(renderTexture.width, renderTexture.height, ConvertFormat, false);
StartCoroutine(CoShareRenderData()); // use co-routine to push frames into the Agora stream
} else
{
logger.UpdateLog("Error: No Render Texture Found. Check Virtual Camera.");
}
}
void DisableSharing()
{
BufferTexture = null;
}
IEnumerator CoShareRenderData()
{
while (ShareCameraMode == 1)
{
yield return new WaitForEndOfFrame();
ShareRenderTexture();
}
yield return null;
}
private void ShareRenderTexture()
{
if (BufferTexture == null) // sharing disabled or not started
{
return;
}
Camera targetCamera = VirtualCam; // AR Camera
RenderTexture.active = targetCamera.targetTexture; // the targetTexture holds render texture
Rect rect = new Rect(0, 0, targetCamera.targetTexture.width, targetCamera.targetTexture.height);
BufferTexture.ReadPixels(rect, 0, 0);
BufferTexture.Apply();
byte[] bytes = BufferTexture.GetRawTextureData();
// sends the Raw data contained in bytes
//monoProxy.StartCoroutine(PushFrame(bytes, (int)rect.width, (int)rect.height,
//() =>
//{
// bytes = null;
//}));
StartCoroutine(PushFrame(bytes, (int)rect.width, (int)rect.height,
() =>
{
bytes = null;
}));
RenderTexture.active = null;
}
/// <summary>
/// Push frame to the remote client. This is the same code that does ScreenSharing.
/// </summary>
/// <param name="bytes">raw video image data</param>
/// <param name="width"></param>
/// <param name="height"></param>
/// <param name="onFinish">callback upon finish of the function</param>
/// <returns></returns>
IEnumerator PushFrame(byte[] bytes, int width, int height, System.Action onFinish)
{
if (bytes == null || bytes.Length == 0)
{
Debug.LogError("Zero bytes found!!!!");
yield break;
}
IRtcEngine rtc = IRtcEngine.QueryEngine();
//if the engine is present
if (rtc != null)
{
//Create a new external video frame
ExternalVideoFrame externalVideoFrame = new ExternalVideoFrame();
//Set the buffer type of the video frame
externalVideoFrame.type = ExternalVideoFrame.VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
// Set the video pixel format
externalVideoFrame.format = PixelFormat; // VIDEO_PIXEL_RGBA
//apply raw data you are pulling from the rectangle you created earlier to the video frame
externalVideoFrame.buffer = bytes;
//Set the width of the video frame (in pixels)
externalVideoFrame.stride = width;
//Set the height of the video frame
externalVideoFrame.height = height;
//Remove pixels from the sides of the frame
externalVideoFrame.cropLeft = 10;
externalVideoFrame.cropTop = 10;
externalVideoFrame.cropRight = 10;
externalVideoFrame.cropBottom = 10;
//Rotate the video frame (0, 90, 180, or 270)
externalVideoFrame.rotation = 180;
// increment i with the video timestamp
//externalVideoFrame.timestamp = System.DateTime.Now.Ticks;
externalVideoFrame.timestamp = timeStampCount++;
//Push the external video frame with the frame we just created
int a = rtc.PushVideoFrame(externalVideoFrame); // capture the return code for the periodic log below
if (timeStampCount % 100 == 0) Debug.Log(" pushVideoFrame(" + timeStampCount + ") size:" + bytes.Length + " => " + a);
}
yield return null;
onFinish();
}
#endregion
}
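One thing worth noting about the code above: EnableVirtualCameraSharing() only starts the capture coroutine if VirtualCam has a RenderTexture assigned; otherwise it logs "No Render Texture Found" and no frames are ever pushed, which shows up remotely as a black screen. A minimal sketch of assigning one at runtime, assuming the 1280x720 encoder settings used above (the class name is hypothetical):

using UnityEngine;

// Hypothetical helper, not part of the Agora package: ensures the virtual
// camera renders into a RenderTexture so ShareRenderTexture() has pixels to read.
public class VirtualCamSetup : MonoBehaviour
{
    [SerializeField] private Camera virtualCam; // the same camera referenced by AgoraVirtualCamera

    void Awake()
    {
        if (virtualCam.targetTexture == null)
        {
            // 1280x720 matches the encoder dimensions configured above; 24-bit depth buffer
            var rt = new RenderTexture(1280, 720, 24, RenderTextureFormat.ARGB32);
            rt.Create();
            virtualCam.targetTexture = rt;
        }
    }
}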

Related

How can I get a target name from an image target to play a video when finding a marker in an augmented reality app in Unity and Vuforia

How can I get a target name from an image target to play a video when finding a marker in an augmented reality app? Basically, I have this script and I want to modify it so that when a marker is detected it shows a 3D augmented reality object and, on the side, a video. Here's the script:
/*==============================================================================
Copyright (c) 2021 PTC Inc. All Rights Reserved.
Confidential and Proprietary - Protected under copyright and other laws.
Vuforia is a trademark of PTC Inc., registered in the United States and other
countries.
==============================================================================*/
using System;
using UnityEngine;
using UnityEngine.Events;
using Vuforia;
using UnityEngine.Video;
/// <summary>
/// A custom handler that implements the ITrackableEventHandler interface.
///
/// Changes made to this file could be overwritten when upgrading the Vuforia version.
/// When implementing custom event handler behavior, consider inheriting from this class instead.
/// </summary>
public class ScriptModificado : MonoBehaviour
{
public enum TrackingStatusFilter
{
Tracked,
Tracked_ExtendedTracked,
Tracked_ExtendedTracked_Limited
}
/// <summary>
/// A filter that can be set to either:
/// - Only consider a target if it's in view (TRACKED)
/// - Also consider the target if's outside of the view, but the environment is tracked (EXTENDED_TRACKED)
/// - Even consider the target if tracking is in LIMITED mode, e.g. the environment is just 3dof tracked.
/// </summary>
public TrackingStatusFilter StatusFilter = TrackingStatusFilter.Tracked_ExtendedTracked_Limited;
public bool UsePoseSmoothing = false;
public AnimationCurve AnimationCurve = AnimationCurve.Linear(0, 0, LERP_DURATION, 1);
public UnityEvent OnTargetFound;
public UnityEvent OnTargetLost;
protected ObserverBehaviour mObserverBehaviour;
protected TargetStatus mPreviousTargetStatus = TargetStatus.NotObserved;
protected bool mCallbackReceivedOnce;
const float LERP_DURATION = 0.3f;
PoseSmoother mPoseSmoother;
// public AudioSource aSource;
// public AudioClip aClip;
public VideoPlayer vPlayer;
public GameObject BotonReproducir;
protected virtual void Start()
{
mObserverBehaviour = GetComponent<ObserverBehaviour>();
if (mObserverBehaviour)
{
mObserverBehaviour.OnTargetStatusChanged += OnObserverStatusChanged;
mObserverBehaviour.OnBehaviourDestroyed += OnObserverDestroyed;
OnObserverStatusChanged(mObserverBehaviour, mObserverBehaviour.TargetStatus);
SetupPoseSmoothing();
}
}
protected virtual void OnDestroy()
{
if (VuforiaBehaviour.Instance != null)
VuforiaBehaviour.Instance.World.OnStateUpdated -= OnStateUpdated;
if (mObserverBehaviour)
OnObserverDestroyed(mObserverBehaviour);
mPoseSmoother?.Dispose();
}
void OnObserverDestroyed(ObserverBehaviour observer)
{
mObserverBehaviour.OnTargetStatusChanged -= OnObserverStatusChanged;
mObserverBehaviour.OnBehaviourDestroyed -= OnObserverDestroyed;
mObserverBehaviour = null;
}
void OnObserverStatusChanged(ObserverBehaviour behaviour, TargetStatus targetStatus)
{
var name = mObserverBehaviour.TargetName;
if (mObserverBehaviour is VuMarkBehaviour vuMarkBehaviour && vuMarkBehaviour.InstanceId != null)
{
name += " (" + vuMarkBehaviour.InstanceId + ")";
}
Debug.Log($"Target status: {name} {targetStatus.Status} -- {targetStatus.StatusInfo}");
HandleTargetStatusChanged(mPreviousTargetStatus.Status, targetStatus.Status);
HandleTargetStatusInfoChanged(targetStatus.StatusInfo);
mPreviousTargetStatus = targetStatus;
}
protected virtual void HandleTargetStatusChanged(Status previousStatus, Status newStatus)
{
var shouldBeRendererBefore = ShouldBeRendered(previousStatus);
var shouldBeRendererNow = ShouldBeRendered(newStatus);
if (shouldBeRendererBefore != shouldBeRendererNow)
{
if (shouldBeRendererNow)
{
OnTrackingFound();
/*if (mObserverBehaviour.TargetName == "nocturno")
{
// aSource.PlayOneShot(aClip);
} */
if (mObserverBehaviour.TargetName == "nocturno")
{
vPlayer.Play();
}
}
else
{
OnTrackingLost();
if (mObserverBehaviour.TargetName == "nocturno")
{
// aSource.Stop();
vPlayer.Stop();
BotonReproducir.SetActive(false);
}
}
}
else
{
if (!mCallbackReceivedOnce && !shouldBeRendererNow)
{
// This is the first time we are receiving this callback, and the target is not visible yet.
// --> Hide the augmentation.
OnTrackingLost();
}
}
mCallbackReceivedOnce = true;
}
protected virtual void HandleTargetStatusInfoChanged(StatusInfo newStatusInfo)
{
if (newStatusInfo == StatusInfo.WRONG_SCALE)
{
Debug.LogErrorFormat("The target {0} appears to be scaled incorrectly. " +
"This might result in tracking issues. " +
"Please make sure that the target size corresponds to the size of the " +
"physical object in meters and regenerate the target or set the correct " +
"size in the target's inspector.", mObserverBehaviour.TargetName);
}
}
protected bool ShouldBeRendered(Status status)
{
if (status == Status.TRACKED)
{
// always render the augmentation when status is TRACKED, regardless of filter
return true;
}
if (StatusFilter == TrackingStatusFilter.Tracked_ExtendedTracked && status == Status.EXTENDED_TRACKED)
{
// also return true if the target is extended tracked
return true;
}
if (StatusFilter == TrackingStatusFilter.Tracked_ExtendedTracked_Limited &&
(status == Status.EXTENDED_TRACKED || status == Status.LIMITED))
{
// in this mode, render the augmentation even if the target's tracking status is LIMITED.
// this is mainly recommended for Anchors.
return true;
}
return false;
}
protected virtual void OnTrackingFound()
{
if (mObserverBehaviour)
{
var rendererComponents = mObserverBehaviour.GetComponentsInChildren<Renderer>(true);
var colliderComponents = mObserverBehaviour.GetComponentsInChildren<Collider>(true);
var canvasComponents = mObserverBehaviour.GetComponentsInChildren<Canvas>(true);
// Enable rendering:
foreach (var component in rendererComponents)
component.enabled = true;
// Enable colliders:
foreach (var component in colliderComponents)
component.enabled = true;
// Enable canvas':
foreach (var component in canvasComponents)
component.enabled = true;
}
OnTargetFound?.Invoke();
}
protected virtual void OnTrackingLost()
{
if (mObserverBehaviour)
{
var rendererComponents = mObserverBehaviour.GetComponentsInChildren<Renderer>(true);
var colliderComponents = mObserverBehaviour.GetComponentsInChildren<Collider>(true);
var canvasComponents = mObserverBehaviour.GetComponentsInChildren<Canvas>(true);
// Disable rendering:
foreach (var component in rendererComponents)
component.enabled = false;
// Disable colliders:
foreach (var component in colliderComponents)
component.enabled = false;
// Disable canvas':
foreach (var component in canvasComponents)
component.enabled = false;
}
OnTargetLost?.Invoke();
}
protected void SetupPoseSmoothing()
{
UsePoseSmoothing &= VuforiaBehaviour.Instance.WorldCenterMode == WorldCenterMode.DEVICE; // pose smoothing only works with the DEVICE world center mode
mPoseSmoother = new PoseSmoother(mObserverBehaviour, AnimationCurve);
VuforiaBehaviour.Instance.World.OnStateUpdated += OnStateUpdated;
}
void OnStateUpdated()
{
if (enabled && UsePoseSmoothing)
mPoseSmoother.Update();
}
class PoseSmoother
{
const float e = 0.001f;
const float MIN_ANGLE = 2f;
PoseLerp mActivePoseLerp;
Pose mPreviousPose;
readonly ObserverBehaviour mTarget;
readonly AnimationCurve mAnimationCurve;
TargetStatus mPreviousStatus;
public PoseSmoother(ObserverBehaviour target, AnimationCurve animationCurve)
{
mTarget = target;
mAnimationCurve = animationCurve;
}
public void Update()
{
var currentPose = new Pose(mTarget.transform.position, mTarget.transform.rotation);
var currentStatus = mTarget.TargetStatus;
UpdatePoseSmoothing(currentPose, currentStatus);
mPreviousPose = currentPose;
mPreviousStatus = currentStatus;
}
void UpdatePoseSmoothing(Pose currentPose, TargetStatus currentTargetStatus)
{
if (mActivePoseLerp == null && ShouldSmooth(currentPose, currentTargetStatus))
{
mActivePoseLerp = new PoseLerp(mPreviousPose, currentPose, mAnimationCurve);
}
if (mActivePoseLerp != null)
{
var pose = mActivePoseLerp.GetSmoothedPosition(Time.deltaTime);
mTarget.transform.SetPositionAndRotation(pose.position, pose.rotation);
if (mActivePoseLerp.Complete)
{
mActivePoseLerp = null;
}
}
}
/// Smooth pose transition if the pose changed and the target is still being reported as "extended tracked" or it has just returned to
/// "tracked" from previously being "extended tracked"
bool ShouldSmooth(Pose currentPose, TargetStatus currentTargetStatus)
{
return (currentTargetStatus.Status == Status.EXTENDED_TRACKED || (currentTargetStatus.Status == Status.TRACKED && mPreviousStatus.Status == Status.EXTENDED_TRACKED)) &&
(Vector3.SqrMagnitude(currentPose.position - mPreviousPose.position) > e || Quaternion.Angle(currentPose.rotation, mPreviousPose.rotation) > MIN_ANGLE);
}
public void Dispose()
{
mActivePoseLerp = null;
}
}
class PoseLerp
{
readonly AnimationCurve mCurve;
readonly Pose mStartPose;
readonly Pose mEndPose;
readonly float mEndTime;
float mElapsedTime;
public bool Complete { get; private set; }
public PoseLerp(Pose startPose, Pose endPose, AnimationCurve curve)
{
mStartPose = startPose;
mEndPose = endPose;
mCurve = curve;
mEndTime = mCurve.keys[mCurve.length - 1].time;
}
public Pose GetSmoothedPosition(float deltaTime)
{
mElapsedTime += deltaTime;
if (mElapsedTime >= mEndTime)
{
mElapsedTime = 0;
Complete = true;
return mEndPose;
}
var ratio = mCurve.Evaluate(mElapsedTime);
var smoothPosition = Vector3.Lerp(mStartPose.position, mEndPose.position, ratio);
var smoothRotation = Quaternion.Slerp(mStartPose.rotation, mEndPose.rotation, ratio);
return new Pose(smoothPosition, smoothRotation);
}
}
}
So how do I get the target name from mObserverBehaviour, change the "nocturno" check to the name of my own image target, and have it play the video? And how can I modify this code so that when the Vuforia AR camera detects the image target, the video is displayed on the side in a 2D sprite object, looking more or less like this video: https://www.youtube.com/watch?v=izJatV5ypvM
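For what it's worth, the place where the script above ties a target name to playback is the TargetName check inside HandleTargetStatusChanged. A minimal sketch of the same pattern with your own target name and a side object ("MiTarget" and sidePanel are placeholders, not names from the project):

// add a field for the 2D object that hosts the video, next to vPlayer:
public GameObject sidePanel; // placeholder: a sprite/quad placed beside the 3D model

// then, in the "target found" branch of HandleTargetStatusChanged:
if (mObserverBehaviour.TargetName == "MiTarget") // placeholder: your image target's name
{
    sidePanel.SetActive(true); // show the side video surface
    vPlayer.Play();            // the VideoPlayer field already declared on this script
}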
Attached image: https://i.stack.imgur.com/Buleh.png

HoloLens - How to get webcam texture 2D from Vuforia

I'm developing a Vuforia app for HoloLens using Unity.
The app displays a simple 3D object when an image target is detected.
I'm also using the fm Exhibition Tool Pack hololens from the Unity Asset Store in order to stream the app running on the HoloLens to a PC.
Everything works fine, but when I stream the app to the PC I see the 3D Unity scene instead of the room.
So I've tried to get the webcam texture and attach it to a cube inside the scene, but the Vuforia ARCamera somehow conflicts with it and I can't see anything on the cube. When I run the app inside the Unity simulator, however, I see myself on the cube.
Is there a way to get the webcam Texture2D from Vuforia and attach it to a GameObject inside the scene? Maybe with the Vuforia.Image class? But I don't know how it works.
The scripts below are compatible with FMETP STREAM and have been tested on mobile.
using UnityEngine;
using System.Collections;
using Vuforia;
using UnityEngine.UI;
public class VuforiaCamAccess : MonoBehaviour
{
private bool mAccessCameraImage = true;
public RawImage rawImage;
public GameObject Mesh;
private Texture2D texture;
#if UNITY_EDITOR
private Vuforia.PIXEL_FORMAT mPixelFormat = Vuforia.PIXEL_FORMAT.GRAYSCALE;
#else
private Vuforia.PIXEL_FORMAT mPixelFormat = Vuforia.PIXEL_FORMAT.RGB888;
#endif
private bool mFormatRegistered = false;
void Start()
{
#if UNITY_EDITOR
texture = new Texture2D(Screen.width, Screen.height, TextureFormat.R8, false);
#else
texture = new Texture2D(Screen.width, Screen.height, TextureFormat.RGB24, false);
#endif
// Register Vuforia life-cycle callbacks:
Vuforia.VuforiaARController.Instance.RegisterVuforiaStartedCallback(OnVuforiaStarted);
Vuforia.VuforiaARController.Instance.RegisterOnPauseCallback(OnPause);
Vuforia.VuforiaARController.Instance.RegisterTrackablesUpdatedCallback(OnTrackablesUpdated);
}
private void OnVuforiaStarted()
{
// Try register camera image format
if (CameraDevice.Instance.SetFrameFormat(mPixelFormat, true))
{
Debug.Log("Successfully registered pixel format " + mPixelFormat.ToString());
mFormatRegistered = true;
}
else
{
Debug.LogError("Failed to register pixel format " + mPixelFormat.ToString() +
"\n the format may be unsupported by your device;" +
"\n consider using a different pixel format.");
mFormatRegistered = false;
}
}
private void OnPause(bool paused)
{
if (paused)
{
Debug.Log("App was paused");
UnregisterFormat();
}
else
{
Debug.Log("App was resumed");
RegisterFormat();
}
}
private void OnTrackablesUpdated()
{
//skip if still loading image to texture2d
if (LoadingTexture) return;
if (mFormatRegistered)
{
if (mAccessCameraImage)
{
Vuforia.Image image = CameraDevice.Instance.GetCameraImage(mPixelFormat);
//if (image != null && image.IsValid())
if (image != null)
{
byte[] pixels = image.Pixels;
int width = image.Width;
int height = image.Height;
StartCoroutine(SetTexture(pixels, width, height));
}
}
}
}
bool LoadingTexture = false;
IEnumerator SetTexture(byte[] pixels, int width, int height)
{
if (!LoadingTexture)
{
LoadingTexture = true;
if (pixels != null && pixels.Length > 0)
{
if (texture.width != width || texture.height != height)
{
#if UNITY_EDITOR
texture = new Texture2D(width, height, TextureFormat.R8, false);
#else
texture = new Texture2D(width, height, TextureFormat.RGB24, false);
#endif
}
texture.LoadRawTextureData(pixels);
texture.Apply();
if (rawImage != null)
{
rawImage.texture = texture;
rawImage.material.mainTexture = texture;
}
if (Mesh != null) Mesh.GetComponent<Renderer>().material.mainTexture = texture;
}
yield return null;
LoadingTexture = false;
}
}
private void UnregisterFormat()
{
Debug.Log("Unregistering camera pixel format " + mPixelFormat.ToString());
CameraDevice.Instance.SetFrameFormat(mPixelFormat, false);
mFormatRegistered = false;
}
private void RegisterFormat()
{
if (CameraDevice.Instance.SetFrameFormat(mPixelFormat, true))
{
Debug.Log("Successfully registered camera pixel format " + mPixelFormat.ToString());
mFormatRegistered = true;
}
else
{
Debug.LogError("Failed to register camera pixel format " + mPixelFormat.ToString());
mFormatRegistered = false;
}
}
}

Unity 2D Camera Zooms in when character moves, but properties of the camera do not change

After I click play the camera stays as it should be, but when I move my character the camera zooms in for no reason.
The properties of the camera do not change at all; the zoom and everything else stay the same. I tried changing from orthographic to perspective: no change. Moving the z axis: no change. Changing the scale: no change. Changing the resolution: no change. Making the camera not a parent: no change; it behaves the same as a parent and as a child.
before character walks
after character walks
I don't think it has anything to do with the code, but here is the code attached to my character. The camera behaves the same as a child and as a parent.
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class movement : MonoBehaviour
{
public float speed = 5f;
public float jumpSpeed = 8f;
private float movementX = 0f;
private Rigidbody2D rigidBody;
public Transform groundCheckPoint;
public float groundCheckRadius;
public LayerMask groundLayer;
public bool isTouchingGround;
public SpriteRenderer box;
private bool canSpawn = true;
private bool canAnimateWalk = true;
private bool canAnimateIdle = true;
private bool canAnimateJump = true;
private bool stopJump = true;
private int spawningSpeed = 1000;
// Use this for initialization
void Start()
{
rigidBody = GetComponent<Rigidbody2D>();
}
// Update is called once per frame
void Update()
{
isTouchingGround = Physics2D.OverlapBox(groundCheckPoint.position,new Vector2(0.9f,0.1f),0f, groundLayer);
movementX = Input.GetAxis("Horizontal");
if (movementX > 0f)
{
if(canAnimateWalk==true && isTouchingGround)
{
canAnimateWalk = false;
StartCoroutine(AnimateWalk());
}
GetComponent<SpriteRenderer>().transform.localScale = new Vector3(2, 2, 1);
rigidBody.velocity = new Vector2(movementX * speed, rigidBody.velocity.y);
}
else if (movementX < 0f)
{
if (canAnimateWalk == true && isTouchingGround)
{
canAnimateWalk = false;
StartCoroutine(AnimateWalk());
}
GetComponent<SpriteRenderer>().transform.localScale = new Vector3(-2, 2, 1);
rigidBody.velocity = new Vector2(movementX * speed, rigidBody.velocity.y);
}
else
{
if(isTouchingGround)
{
StopCoroutine(AnimateWalk());
if(canAnimateIdle==true)
{
canAnimateIdle = false;
StartCoroutine(AnimateIdle());
}
}
rigidBody.velocity = new Vector2(0, rigidBody.velocity.y);
}
if (Input.GetButtonDown("Jump") && isTouchingGround)
{
canAnimateJump = false;
rigidBody.velocity = new Vector2(rigidBody.velocity.x, jumpSpeed);
StartCoroutine(AnimateJump());
}
else if(!isTouchingGround)
{
StopCoroutine(AnimateWalk());
}
}
IEnumerator AnimateJump()
{
Debug.Log("Animating Jump");
int counter = 0;
while (counter < 10)
{
counter++;
GetComponent<SpriteRenderer>().sprite = Resources.Load<Sprite>("img/j" + counter);
yield return new WaitForSeconds(0.1f);
if(isTouchingGround==true)
{
break;
}
}
while(!isTouchingGround)
{
GetComponent<SpriteRenderer>().sprite = Resources.Load<Sprite>("img/j10");
yield return new WaitForSeconds(0.1f);
}
GetComponent<SpriteRenderer>().sprite = Resources.Load<Sprite>("img/i1");
canAnimateWalk = true;
canAnimateJump = true;
}
IEnumerator AnimateIdle()
{
int counter = 0;
while(Input.GetAxis("Horizontal")==0 && counter <10 && rigidBody.velocity.y==0)
{
counter++;
GetComponent<SpriteRenderer>().sprite = Resources.Load<Sprite>("img/i"+counter);
yield return new WaitForSeconds(0.2f);
}
canAnimateIdle = true;
}
IEnumerator AnimateWalk()
{
int counter = 0;
while (Input.GetAxis("Horizontal")!=0 && counter < 8 && rigidBody.velocity.y==0)
{
counter++;
GetComponent<SpriteRenderer>().sprite = Resources.Load<Sprite>("img/g" + counter);
yield return new WaitForSeconds(0.08f);
}
canAnimateWalk = true;
}
}
What could it be? I think I've tried everything.
GetComponent<SpriteRenderer>().transform.localScale = new Vector3(-2, 2, 1);
If your movement script is attached to your "guy" GameObject, then you are changing its (local) scale, and all of its children will scale accordingly.
Since your camera is a child of the guy, it scales too and produces the result you see.
Try unparenting the camera from your guy and create a separate script that follows your guy, then attach that to your camera.
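A minimal sketch of such a follow script (the field names are illustrative, not from the original project):

using UnityEngine;

// Attach this to the (unparented) Camera; it follows the target's position
// without inheriting its scale or rotation.
public class FollowCamera : MonoBehaviour
{
    public Transform target; // the "guy"
    public Vector3 offset = new Vector3(0f, 0f, -10f); // keep the camera behind the 2D plane

    void LateUpdate()
    {
        if (target != null)
            transform.position = target.position + offset;
    }
}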
I solved my problem.
The issue was in the character scaling. The camera did not change, but the size of the character did, making me believe there was a zoom-in.
My character's x and y scale is 1 and 1, but I used a scale of 2 and 2 on move.
The scale was also used to flip my character when it moves left and right.
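As an aside, SpriteRenderer.flipX mirrors the sprite without touching localScale, which avoids this class of problem entirely. A sketch, assuming the movement script above (replace the localScale flips inside Update()):

// before: GetComponent<SpriteRenderer>().transform.localScale = new Vector3(-2, 2, 1);
// after: mirror only the sprite; child transforms such as a camera are unaffected
GetComponent<SpriteRenderer>().flipX = movementX < 0f;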

Bullet shooting in Unity

Hi, I have the code below in my Android project, and when I look up or down and shoot a bullet, the bullet's shooting direction is wrong. Where is the problem?
Here is a video of my project:
http://www.mediafire.com/file/9xhxvyxds4cyego/2016-11-27_15-07-39.mp4
FirstPerson
using System;
using UnityEngine;
using UnityStandardAssets.CrossPlatformInput;
using UnityStandardAssets.Utility;
using Random = UnityEngine.Random;
namespace UnityStandardAssets.Characters.FirstPerson
{
[RequireComponent(typeof (CharacterController))]
[RequireComponent(typeof (AudioSource))]
public class FirstPersonController : MonoBehaviour
{
[SerializeField] private bool m_IsWalking;
[SerializeField] private float m_WalkSpeed;
[SerializeField] private float m_LookSpeed=4;
[SerializeField] private float m_RunSpeed;
[SerializeField] [Range(0f, 1f)] private float m_RunstepLenghten;
[SerializeField] private float m_JumpSpeed;
[SerializeField] private float m_StickToGroundForce;
[SerializeField] private float m_GravityMultiplier;
[SerializeField] private MouseLook m_MouseLook;
[SerializeField] private bool m_UseFovKick;
[SerializeField] private FOVKick m_FovKick = new FOVKick();
[SerializeField] private bool m_UseHeadBob;
[SerializeField] private CurveControlledBob m_HeadBob = new CurveControlledBob();
[SerializeField] private LerpControlledBob m_JumpBob = new LerpControlledBob();
[SerializeField] private float m_StepInterval;
[SerializeField] private AudioClip[] m_FootstepSounds; // an array of footstep sounds that will be randomly selected from.
[SerializeField] private AudioClip m_JumpSound; // the sound played when character leaves the ground.
[SerializeField] private AudioClip m_LandSound; // the sound played when character touches back on ground.
private Camera m_Camera;
private bool m_Jump;
private float m_YRotation;
private Vector2 m_Input;
private Vector3 m_MoveDir = Vector3.zero;
private CharacterController m_CharacterController;
private CollisionFlags m_CollisionFlags;
private bool m_PreviouslyGrounded;
private Vector3 m_OriginalCameraPosition;
private float m_StepCycle;
private float m_NextStep;
private bool m_Jumping;
private AudioSource m_AudioSource;
public GameObject AK;
// Use this for initialization
private void Start()
{
Instantiate(AK);
m_CharacterController = GetComponent<CharacterController>();
m_Camera = Camera.main;
m_OriginalCameraPosition = m_Camera.transform.localPosition;
m_FovKick.Setup(m_Camera);
m_HeadBob.Setup(m_Camera, m_StepInterval);
m_StepCycle = 0f;
m_NextStep = m_StepCycle/2f;
m_Jumping = false;
m_AudioSource = GetComponent<AudioSource>();
m_MouseLook.Init(transform , m_Camera.transform);
}
// Update is called once per frame
private void Update()
{
RotateView();
// the jump state needs to read here to make sure it is not missed
if (!m_Jump)
{
m_Jump = CrossPlatformInputManager.GetButtonDown("Jump");
}
if (!m_PreviouslyGrounded && m_CharacterController.isGrounded)
{
StartCoroutine(m_JumpBob.DoBobCycle());
PlayLandingSound();
m_MoveDir.y = 0f;
m_Jumping = false;
}
if (!m_CharacterController.isGrounded && !m_Jumping && m_PreviouslyGrounded)
{
m_MoveDir.y = 0f;
}
m_PreviouslyGrounded = m_CharacterController.isGrounded;
}
private void PlayLandingSound()
{
m_AudioSource.clip = m_LandSound;
m_AudioSource.Play();
m_NextStep = m_StepCycle + .5f;
}
private void FixedUpdate()
{
float speed;
GetInput(out speed);
// always move along the camera forward as it is the direction that it being aimed at
Vector3 desiredMove = transform.forward*m_Input.y + transform.right*m_Input.x;
// get a normal for the surface that is being touched to move along it
RaycastHit hitInfo;
Physics.SphereCast(transform.position, m_CharacterController.radius, Vector3.down, out hitInfo,
m_CharacterController.height/2f);
desiredMove = Vector3.ProjectOnPlane(desiredMove, hitInfo.normal).normalized;
m_MoveDir.x = desiredMove.x*speed;
m_MoveDir.z = desiredMove.z*speed;
if (m_CharacterController.isGrounded)
{
m_MoveDir.y = -m_StickToGroundForce;
if (m_Jump)
{
m_MoveDir.y = m_JumpSpeed;
PlayJumpSound();
m_Jump = false;
m_Jumping = true;
}
}
else
{
m_MoveDir += Physics.gravity*m_GravityMultiplier*Time.fixedDeltaTime;
}
m_CollisionFlags = m_CharacterController.Move(m_MoveDir*Time.fixedDeltaTime);
ProgressStepCycle(speed);
UpdateCameraPosition(speed);
}
private void PlayJumpSound()
{
m_AudioSource.clip = m_JumpSound;
m_AudioSource.Play();
}
private void ProgressStepCycle(float speed)
{
if (m_CharacterController.velocity.sqrMagnitude > 0 && (m_Input.x != 0 || m_Input.y != 0))
{
m_StepCycle += (m_CharacterController.velocity.magnitude + (speed*(m_IsWalking ? 1f : m_RunstepLenghten)))*
Time.fixedDeltaTime;
}
if (!(m_StepCycle > m_NextStep))
{
return;
}
m_NextStep = m_StepCycle + m_StepInterval;
PlayFootStepAudio();
}
private void PlayFootStepAudio()
{
if (!m_CharacterController.isGrounded)
{
return;
}
// pick & play a random footstep sound from the array,
// excluding sound at index 0
int n = Random.Range(1, m_FootstepSounds.Length);
m_AudioSource.clip = m_FootstepSounds[n];
m_AudioSource.PlayOneShot(m_AudioSource.clip);
// move picked sound to index 0 so it's not picked next time
m_FootstepSounds[n] = m_FootstepSounds[0];
m_FootstepSounds[0] = m_AudioSource.clip;
}
private void UpdateCameraPosition(float speed)
{
Vector3 newCameraPosition;
if (!m_UseHeadBob)
{
return;
}
if (m_CharacterController.velocity.magnitude > 0 && m_CharacterController.isGrounded)
{
m_Camera.transform.localPosition =
m_HeadBob.DoHeadBob(m_CharacterController.velocity.magnitude +
(speed*(m_IsWalking ? 1f : m_RunstepLenghten)));
newCameraPosition = m_Camera.transform.localPosition;
newCameraPosition.y = m_Camera.transform.localPosition.y - m_JumpBob.Offset();
}
else
{
newCameraPosition = m_Camera.transform.localPosition;
newCameraPosition.y = m_OriginalCameraPosition.y - m_JumpBob.Offset();
}
m_Camera.transform.localPosition = newCameraPosition;
}
private void GetInput(out float speed)
{
// Read input
float horizontal = CrossPlatformInputManager.GetAxisRaw("Horizontal");
float vertical = CrossPlatformInputManager.GetAxisRaw("Vertical");
bool waswalking = m_IsWalking;
#if !MOBILE_INPUT
// On standalone builds, walk/run speed is modified by a key press.
// keep track of whether or not the character is walking or running
m_IsWalking = !Input.GetKey(KeyCode.LeftShift);
#endif
// set the desired speed to be walking or running
speed = m_IsWalking ? m_WalkSpeed : m_RunSpeed;
m_Input = new Vector2(horizontal, vertical);
// normalize input if it exceeds 1 in combined length:
if (m_Input.sqrMagnitude > 1)
{
m_Input.Normalize();
}
// handle speed change to give an fov kick
// only if the player is going to a run, is running and the fovkick is to be used
if (m_IsWalking != waswalking && m_UseFovKick && m_CharacterController.velocity.sqrMagnitude > 0)
{
StopAllCoroutines();
StartCoroutine(!m_IsWalking ? m_FovKick.FOVKickUp() : m_FovKick.FOVKickDown());
}
}
private void RotateView()
{
#if !MOBILE_INPUT
m_MouseLook.LookRotation (transform, m_Camera.transform);
m_Camera.transform.localEulerAngles = new Vector3 (-m_MouseLook.y, m_Camera.transform.localEulerAngles.y,
m_Camera.transform.localEulerAngles.z);
transform.localEulerAngles = new Vector3 (0, m_MouseLook.x, 0);
#else
Vector2 m_MouseLook= new Vector2( CrossPlatformInputManager.GetAxisRaw("HorizontalLook")
,CrossPlatformInputManager.GetAxisRaw("VerticalLook"));
float Camx=m_Camera.transform.localEulerAngles.x;
if((Camx>280 && Camx<=360) ||
(Camx >=0 && Camx<80) ||
(Camx>=80 && Camx<180 && m_MouseLook.y>0) ||
(Camx>180 && Camx<=280 && m_MouseLook.y<0))
{
m_Camera.transform.localEulerAngles += new Vector3 (-m_MouseLook.y*m_LookSpeed, m_Camera.transform.localEulerAngles.y,
m_Camera.transform.localEulerAngles.z);
// AK.transform.localEulerAngles=m_Camera.transform.localEulerAngles;
}
transform.localEulerAngles += new Vector3 (0, m_MouseLook.x*m_LookSpeed, 0);
#endif
m_YRotation = m_MouseLook.y;
}
private void OnControllerColliderHit(ControllerColliderHit hit)
{
Rigidbody body = hit.collider.attachedRigidbody;
//dont move the rigidbody if the character is on top of it
if (m_CollisionFlags == CollisionFlags.Below)
{
return;
}
if (body == null || body.isKinematic)
{
return;
}
body.AddForceAtPosition(m_CharacterController.velocity*0.1f, hit.point, ForceMode.Impulse);
}
}
}
Joystick code:
using System;
using UnityEngine;
using UnityEngine.EventSystems;
namespace UnityStandardAssets.CrossPlatformInput
{
public class Joystick : MonoBehaviour, IPointerDownHandler, IPointerUpHandler, IDragHandler
{
public enum AxisOption
{
// Options for which axes to use
Both, // Use both
OnlyHorizontal, // Only horizontal
OnlyVertical // Only vertical
}
public int MovementRange = 100;
public AxisOption axesToUse = AxisOption.Both; // The options for the axes that the still will use
public string horizontalAxisName = "Horizontal"; // The name given to the horizontal axis for the cross platform input
public string verticalAxisName = "Vertical"; // The name given to the vertical axis for the cross platform input
Vector3 m_StartPos;
bool m_UseX; // Toggle for using the x axis
bool m_UseY; // Toggle for using the Y axis
CrossPlatformInputManager.VirtualAxis m_HorizontalVirtualAxis; // Reference to the joystick in the cross platform input
CrossPlatformInputManager.VirtualAxis m_VerticalVirtualAxis; // Reference to the joystick in the cross platform input
void OnEnable()
{
CreateVirtualAxes();
}
void Start()
{
m_StartPos = transform.position;
}
void UpdateVirtualAxes(Vector3 value)
{
var delta = m_StartPos - value;
delta.y = -delta.y;
delta /= MovementRange;
if (m_UseX)
{
m_HorizontalVirtualAxis.Update(-delta.x);
}
if (m_UseY)
{
m_VerticalVirtualAxis.Update(delta.y);
}
}
void CreateVirtualAxes()
{
// set axes to use
m_UseX = (axesToUse == AxisOption.Both || axesToUse == AxisOption.OnlyHorizontal);
m_UseY = (axesToUse == AxisOption.Both || axesToUse == AxisOption.OnlyVertical);
// create new axes based on axes to use
if (m_UseX)
{
m_HorizontalVirtualAxis = new CrossPlatformInputManager.VirtualAxis(horizontalAxisName);
CrossPlatformInputManager.RegisterVirtualAxis(m_HorizontalVirtualAxis);
}
if (m_UseY)
{
m_VerticalVirtualAxis = new CrossPlatformInputManager.VirtualAxis(verticalAxisName);
CrossPlatformInputManager.RegisterVirtualAxis(m_VerticalVirtualAxis);
}
}
public void OnDrag(PointerEventData data)
{
Vector3 newPos = Vector3.zero;
if (m_UseX)
{
int delta = (int)(data.position.x - m_StartPos.x);
delta = Mathf.Clamp(delta, - MovementRange, MovementRange);
newPos.x = delta;
}
if (m_UseY)
{
int delta = (int)(data.position.y - m_StartPos.y);
delta = Mathf.Clamp(delta, -MovementRange, MovementRange);
newPos.y = delta;
}
transform.position = new Vector3(m_StartPos.x + newPos.x, m_StartPos.y + newPos.y, m_StartPos.z + newPos.z);
UpdateVirtualAxes(transform.position);
}
public void OnPointerUp(PointerEventData data)
{
transform.position = m_StartPos;
UpdateVirtualAxes(m_StartPos);
}
public void OnPointerDown(PointerEventData data) { }
void OnDisable()
{
// remove the joysticks from the cross platform input
if (m_UseX)
{
m_HorizontalVirtualAxis.Remove();
}
if (m_UseY)
{
m_VerticalVirtualAxis.Remove();
}
}
}
}
Shooting Bullet
#pragma strict
//---------------------------------------------------------
//---------------------------------------------------------
var Mig29_Fulcrum:GameObject;
var AK:Rigidbody;
var muzzel:GameObject;
var bullet:Rigidbody;
var Speed:int=700;
private var Fire:int=0;
private var Shooting:boolean =false;
private var gun:GameObject;
private var fireDelay:float = 0f; // the delay between shots
function Start () {
gun=GameObject.FindWithTag("MyGun");
//var AkGun:Rigidbody=Instantiate(AK,this.transform.position,this.transform.rotation);
//var b:Rigidbody=Instantiate(bullet,this.transform.position,this.transform.rotation);
}
function Update () {
if(Input.GetButtonDown("Fire2"))
{
//gun.GetComponent.<Animation>().Play("Jump");
Shooting=true;
}
if(Input.GetButtonDown("Fire1"))
{
Shooting=false;
}
if(Shooting)
{
AutoFire(0.08f);
}
}
function FireWeapon()
{
Debug.Log("Fire!!!");
GetComponent.<AudioSource>().Play();
gun.GetComponent.<Animation>().Play("GunFire");
var m:GameObject=Instantiate(muzzel,this.transform.position,this.transform.rotation);
m.transform.Rotate(Vector3(-90,0,0));
m.transform.Rotate(Vector3(0,Random.Range(0,180),0));
Destroy(m,0.015);
var b:Rigidbody=Instantiate(bullet,this.transform.position,this.transform.rotation);
b.AddForce(this.transform.forward*Speed);
}
// The function below activates the FireWeapon function
function AutoFire( fireRate:float)
{
if (fireDelay < fireRate)
{
fireDelay += Time.deltaTime;
}
if (fireDelay >= fireRate)
{
FireWeapon();
fireDelay = 0f;
}
}
Judging by the video, the problem is that your bullets are fired based on the position and orientation of the camera, not the gun. This is normally preferable in most first person shooter games (eg. Counter-Strike and similar), but maybe not in your case (since you're using mobile joystick controls).
This behavior is caused by the following code:
var b:Rigidbody=Instantiate(bullet,this.transform.position,this.transform.rotation);
b.AddForce(this.transform.forward*Speed);
Since you're using the position and rotation of this.transform, the script refers to the transform of the camera you attached the script to.
If you wish your game to behave differently, you should consider referring to the transform component of the gun instead (conveniently, in your case stored in the variable gun):
var b:Rigidbody=Instantiate(bullet,gun.transform.position,gun.transform.rotation);
b.AddForce(gun.transform.forward*Speed);
Note that this might not work if your gun's anchor isn't at the muzzle and its forward vector doesn't point in the direction you want the bullet to come out. In that case, you might want to add to the gun an empty child GameObject that's properly positioned and oriented.
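A sketch of that empty-child ("muzzle") approach, translated to C# since the original script is UnityScript; the names are illustrative:

using UnityEngine;

public class MuzzleFire : MonoBehaviour
{
    public Rigidbody bulletPrefab;
    public Transform muzzle; // empty child of the gun, positioned at the barrel, +Z pointing out
    public float speed = 700f;

    public void FireWeapon()
    {
        // spawn and propel the bullet from the muzzle, not from the camera
        Rigidbody b = Instantiate(bulletPrefab, muzzle.position, muzzle.rotation);
        b.AddForce(muzzle.forward * speed);
    }
}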

Camera onPreviewFrame not called on XT910

I have a problem with the camera of my Moto XT910 Android phone. I would only like to access the camera's (frame) buffer, but "onPreviewFrame" is never called. On my emulator, it works fine.
Thanks in advance,
My code is as below:
public class Store extends SurfaceView implements SurfaceHolder.Callback, PreviewCallback {
static {
System.loadLibrary("hello-jni");
}
public native void decode(Bitmap pTarget, byte[] pSource);
private Camera mCamera;
private byte[] mVideoSource;
private Bitmap mBackBuffer;
private Paint mPaint;
private Size lSize;
private static String TAG = Store.class.getCanonicalName();
public Store(Context context) {
super(context);
getHolder().addCallback(this);
setWillNotDraw(false);
}
public void surfaceCreated(SurfaceHolder holder) {
try {
mCamera = Camera.open();
mCamera.setDisplayOrientation(0);
mCamera.setPreviewDisplay(null);
mCamera.setPreviewCallbackWithBuffer(this);
Log.d(TAG,"surfaceCreated ok");
} catch (IOException eIOException) {
mCamera.release();
mCamera = null;
Log.d(TAG,"surfaceCreated failed");
throw new IllegalStateException();
}
}
public void surfaceChanged(SurfaceHolder pHolder, int pFormat, int pWidth, int pHeight) {
Log.d(TAG,"surfaceChanged in");
mCamera.stopPreview();
lSize = findBestResolution(pWidth, pHeight);
invalidate();
PixelFormat lPixelFormat = new PixelFormat();
PixelFormat.getPixelFormatInfo(mCamera.getParameters()
.getPreviewFormat(), lPixelFormat);
int lSourceSize = lSize.width * lSize.height * lPixelFormat.bitsPerPixel / 8;
mVideoSource = new byte[lSourceSize];
mBackBuffer = Bitmap.createBitmap(lSize.width, lSize.height,Bitmap.Config.ARGB_8888);
Camera.Parameters lParameters = mCamera.getParameters();
lParameters.setPreviewSize(lSize.width, lSize.height);
lParameters.setPreviewFormat(PixelFormat.YCbCr_420_SP);
mCamera.setParameters(lParameters);
mCamera.addCallbackBuffer(mVideoSource);
mCamera.startPreview();
Log.d(TAG,"surfaceChanged out");
}
private Size findBestResolution(int pWidth, int pHeight) {
List<Size> lSizes = mCamera.getParameters().getSupportedPreviewSizes();
Size lSelectedSize = mCamera.new Size(0, 0);
for (Size lSize : lSizes) {
if ((lSize.width <= pWidth) && (lSize.height <= pHeight)
&& (lSize.width >= lSelectedSize.width)
&& (lSize.height >= lSelectedSize.height)) {
lSelectedSize = lSize;
}
}
if ((lSelectedSize.width == 0) || (lSelectedSize.height == 0)) {
lSelectedSize = lSizes.get(0);
}
return lSelectedSize;
}
public void surfaceDestroyed(SurfaceHolder holder) {
if (mCamera != null) {
Log.d(TAG,"surfaceDestroyed");
mCamera.stopPreview();
mCamera.release();
mCamera = null;
mVideoSource = null;
mBackBuffer = null;
}
}
public void onPreviewFrame(byte[] pData, Camera pCamera) {
Log.d(TAG,"onPreviewFrame");
decode(mBackBuffer, pData);
invalidate();
}
@Override
protected void onDraw(Canvas pCanvas) {
Log.d(TAG,"onDraw in");
if (mCamera != null) {
Paint paint = new Paint();
paint.setColor(Color.YELLOW);
String text = String.format("%dx%d", lSize.width, lSize.height);
pCanvas.drawText(text, 10, 10, paint);
pCanvas.drawLine(0, 0, 100, 100, paint);
pCanvas.drawLine(20, 0, 0, 20, paint);
pCanvas.drawBitmap(mBackBuffer, 0, 0, mPaint);
mCamera.addCallbackBuffer(mVideoSource);
Log.d(TAG,"onDraw out");
}
}
}
First, I assume you didn't get an exception and that the camera parameter you used (PixelFormat.YCbCr_420_SP) is supported by your camera.
To fix the problem, you should try calling mCamera.setPreviewDisplay(view);
with an actual SurfaceView rather than null. Unfortunately, according to the Android developers, the preview is not supposed to start without a surface view:
Unfortunately, there's no way to get preview frames without having a SurfaceView to display them, if you're targeting pre-3.0 devices. On 3.0 and later, you can use the Camera#setPreviewTexture() method to send preview data to a GPU texture. Even if just want to use the YUV data instead of RGB data in the GPU, this would allow you to stream without a mandatory UI element; just ignore the SurfaceTexture entirely.
If you stick with your current approach, you'll unfortunately find that your app will not function properly on many devices; there are several that follow the API's restriction on having to have setPreviewDisplay() or setPreviewTexture() set before preview data will be sent.
See the full bug report here: http://code.google.com/p/android/issues/detail?id=28238
However, as I said in that bug report, it can work without a surface view on some devices, but there is no guarantee.