FMOD pcmreadcallback is never called while playing audio from a microphone source - unity3d

I'm writing a Unity class to capture and play back audio data from a microphone. The playback part works fine (I can hear my voice in the headphones), but I cannot access the audio samples because pcmreadcallback is never called during playback; it is called only once, inside the createSound method. I think I'm missing some setting. I also tried several OR combinations of the FMOD.MODE flags, but with no luck.
I'm using fmodstudio10510.unitypackage and testing under Windows 7, but it should be fully cross-platform.
Thanks in advance.
Walter
using System;
using UnityEngine;

public class AudioInit : MonoBehaviour {

    FMOD.System lowlevel = null;
    FMOD.Sound snd = null;

    // callback delegates
    FMOD.SOUND_PCMREADCALLBACK pcmreadcallbackPtr = new FMOD.SOUND_PCMREADCALLBACK (pcmreadcallbackFunc);
    FMOD.SOUND_PCMSETPOSCALLBACK pcmsetposcallbackPtr = new FMOD.SOUND_PCMSETPOSCALLBACK (pcmsetposcallbackFunc);

    int driverId;

    void Start () {
        int channels = 1;
        int sampleRate = 8000;
        float recordTime = 1.0f;

        // get the low-level system instance
        FMOD_StudioSystem.instance.System.getLowLevelSystem (out lowlevel);

        // fill in the sound info struct
        FMOD.CREATESOUNDEXINFO soundInfo = new FMOD.CREATESOUNDEXINFO ();
        soundInfo.cbsize = System.Runtime.InteropServices.Marshal.SizeOf (typeof(FMOD.CREATESOUNDEXINFO));
        soundInfo.length = (uint)(sampleRate * channels * sizeof(byte) * recordTime);
        soundInfo.numchannels = channels;
        soundInfo.defaultfrequency = sampleRate;
        soundInfo.format = FMOD.SOUND_FORMAT.PCM8;
        soundInfo.pcmreadcallback = pcmreadcallbackPtr;
        soundInfo.pcmsetposcallback = pcmsetposcallbackPtr;
        soundInfo.dlsname = IntPtr.Zero;

        // FMOD.MODE flags
        FMOD.MODE mode = FMOD.MODE.OPENUSER | FMOD.MODE.LOOP_NORMAL;

        // create the sound
        FMOD.RESULT res = lowlevel.createSound ((string)null, mode, ref soundInfo, out snd);
        if (res != FMOD.RESULT.OK) {
            Debug.Log ("ERROR createSound " + res.ToString ());
            return;
        }

        // get the record driver
        res = lowlevel.getDriver (out driverId);
        if (res != FMOD.RESULT.OK) {
            Debug.Log ("ERROR getDriver " + res.ToString ());
            return;
        }

        // start recording from the microphone
        res = lowlevel.recordStart (driverId, snd, true);
        if (res != FMOD.RESULT.OK) {
            Debug.Log ("ERROR recordStart " + res.ToString ());
            return;
        }

        // wait for a valid record position
        uint pos = 0;
        uint tries = 10;
        while (pos == 0 && tries-- > 0) {
            if (lowlevel.getRecordPosition (driverId, out pos) == FMOD.RESULT.OK) {
                System.Threading.Thread.Sleep (100);
            } else {
                break;
            }
        }
        if (pos == 0) {
            Debug.Log ("ERROR invalid record position");
            return;
        }

        // start playback
        FMOD.Channel chn;
        res = lowlevel.playSound (snd, new FMOD.ChannelGroup (IntPtr.Zero), false, out chn);
        if (res != FMOD.RESULT.OK) {
            Debug.Log ("ERROR playSound " + res.ToString ());
            return;
        }
    }

    // only called once, during lowlevel.createSound execution
    static FMOD.RESULT pcmreadcallbackFunc (IntPtr sound, IntPtr data, uint len) {
        Debug.Log ("pcmreadcallback sample size " + len.ToString ());
        return FMOD.RESULT.OK;
    }

    static FMOD.RESULT pcmsetposcallbackFunc (IntPtr sound, int subsound, uint position, FMOD.TIMEUNIT postype) {
        Debug.Log ("pcmsetposcallback position " + position.ToString ());
        return FMOD.RESULT.OK;
    }

    // Update is called once per frame
    void Update () {
    }
}

The recording system doesn't go through pcmreadcallback; that's why you aren't getting those callbacks.
To access the microphone data, use Sound::lock and Sound::unlock.
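A minimal sketch of that approach, reusing the snd, lowlevel and driverId fields from the question; lastReadPos is a new field assumed here, and since getRecordPosition reports PCM samples, for PCM8 mono they coincide with bytes:

uint lastReadPos = 0;

void ReadMicData () {
    uint recordPos = 0;
    if (lowlevel.getRecordPosition (driverId, out recordPos) != FMOD.RESULT.OK)
        return;
    if (recordPos == lastReadPos)
        return; // nothing new recorded since the last read

    uint soundLength = 8000; // 1 second of 8 kHz PCM8 mono, as passed to createSound
    uint bytesToRead = (recordPos > lastReadPos)
        ? recordPos - lastReadPos
        : soundLength - lastReadPos + recordPos; // the record cursor wrapped around

    IntPtr ptr1, ptr2;
    uint len1, len2;
    // 'lock' is a C# keyword, so the wrapper exposes Sound::lock as @lock
    if (snd.@lock (lastReadPos, bytesToRead, out ptr1, out ptr2, out len1, out len2) == FMOD.RESULT.OK) {
        byte[] samples = new byte[len1 + len2];
        if (len1 > 0) System.Runtime.InteropServices.Marshal.Copy (ptr1, samples, 0, (int)len1);
        if (len2 > 0) System.Runtime.InteropServices.Marshal.Copy (ptr2, samples, (int)len1, (int)len2);
        snd.unlock (ptr1, ptr2, len1, len2); // hand the ring buffer back to FMOD
        // ... process samples here ...
    }
    lastReadPos = recordPos;
}

Calling this periodically from Update() drains whatever the record cursor has produced since the previous call.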

Related

Poor screen share using Agora Virtual Camera Prefab from Oculus

I'm currently working on a Unity project where I'm trying to screen-share my content from an Oculus Quest 2 to a remote PC, with audio chat enabled, using the Agora Virtual Camera Prefab package. I followed this blog for the implementation: https://www.agora.io/en/blog/how-to-build-a-vr-video-chat-app-using-unitys-xr-framework/. I implemented it successfully and connected with a remote PC, but the screen share from the Oculus is very poor on the remote PC (most of the time a black screen). The audio is better, with only a lag of 1 or 2 seconds, and I am even able to see the remote PC user's camera video inside my game through the Oculus. It would help a lot if anyone could assist with this issue; I'm totally stuck. I'm also attaching a screenshot of the Agora Virtual Camera Prefab's config and the code used, for any suggestions.
using System.Collections;
using agora_gaming_rtc;
using UnityEngine;
using UnityEngine.UI;
using static agora_gaming_rtc.ExternalVideoFrame;
using agora_utilities;
using System.Collections.Generic;
#if (UNITY_2018_3_OR_NEWER && UNITY_ANDROID)
using UnityEngine.Android;
#endif
public class AgoraVirtualCamera : MonoBehaviour
{
// Use this for initialization
#if (UNITY_2018_3_OR_NEWER && UNITY_ANDROID)
private ArrayList permissionList = new ArrayList();
#endif
// PLEASE KEEP THIS App ID IN SAFE PLACE
// Get your own App ID at https://dashboard.agora.io/
[Header("Agora Config")]
[SerializeField]
private string AppID = "";
[SerializeField]
private string TempToken = "";
[SerializeField]
private string TokenServerURL = "";
[SerializeField]
private string ChannelName = "";
[Header("Env Config")]
[SerializeField]
private Camera VirtualCam;
[SerializeField]
private GameObject RemoteVideoRoot;
[SerializeField]
private GameObject RemoteScreenVideoRoot;
/*[SerializeField]
private int ScreenShareUID;*/
[SerializeField]
private Text LogText;
[Header("UI Btn Config")]
public GameObject JoinBtn;
public GameObject LeaveBtn;
public GameObject MicBtn;
public GameObject QuitBtn;
public Color ActiveMicColor = Color.green;
public Color DisabledMicColor = Color.red;
[Header("Video Encoder Config")]
[SerializeField]
private VideoDimensions dimensions = new VideoDimensions
{
width = 1280,
height = 720
};
[SerializeField]
private int bitrate = 1130;
[SerializeField]
private FRAME_RATE frameRate = FRAME_RATE.FRAME_RATE_FPS_30;
[SerializeField]
private VIDEO_MIRROR_MODE_TYPE mirrorMode = VIDEO_MIRROR_MODE_TYPE.VIDEO_MIRROR_MODE_DISABLED;
// use bitrate: 2260 for broadcast mode
// Pixel format
public static TextureFormat ConvertFormat = TextureFormat.RGBA32;
public static VIDEO_PIXEL_FORMAT PixelFormat = VIDEO_PIXEL_FORMAT.VIDEO_PIXEL_RGBA;
private static int ShareCameraMode = 1; // 0 = unsafe buffer pointer, 1 = renderer image
// used for setting frame order
int timeStampCount = 0; // monotonic timestamp counter
// perspective camera buffer
private Texture2D BufferTexture;
// output log
private Logger logger;
// uid
private uint UID = 0; // 0 tells the agora engine to generate the uid
// reference to the active agora client
static AgoraInterface client = null;
// keep track of remote UID
Dictionary<string, List<uint>> RemoteUIDs = new Dictionary<string, List<uint>>();
// keep track of channel state
bool InChannel = false;
#region --- Life Cycles ---
void Awake()
{
// keep this alive across scenes
//DontDestroyOnLoad(this.gameObject);
}
// Start is called before the first frame update
void Start()
{
CheckAppId();// ensure an AppID is defined
// if there isn't a join button defined, autojoin
if (JoinBtn == null || !JoinBtn.activeInHierarchy)
{
JoinChannel();
}
}
// Update is called once per frame
void Update()
{
PermissionHelper.RequestMicrophontPermission();
PermissionHelper.RequestCameraPermission();
}
void OnDisable()
{
LeaveChannel();
}
void OnApplicationPause(bool paused)
{
if (client != null)
{
client.EnableVideo(paused);
client.EnableAudio(paused);
}
}
void OnApplicationQuit()
{
ShareCameraMode = 0;
if (client != null)
{
client.Leave();
client.UnloadEngine();
}
}
#endregion
#region --- Agora Functions ---
void ReloadAgoraEngine()
{
client = GetComponent<AgoraInterface>();
if (client != null)
{
client.Leave();
client.UnloadEngine();
Destroy(client);
client = null;
}
client = gameObject.AddComponent<AgoraInterface>();
client.SetLogger(logger);
// video config
VideoEncoderConfiguration videoEncodeConfig = new VideoEncoderConfiguration
{
dimensions = this.dimensions,
frameRate = this.frameRate,
bitrate = this.bitrate,
orientationMode = ORIENTATION_MODE.ORIENTATION_MODE_FIXED_LANDSCAPE,
mirrorMode = this.mirrorMode
};
client.SetVideoEncoderConfig(videoEncodeConfig);
}
// agora functions
public void JoinChannel()
{
// clean up and create a new one
ReloadAgoraEngine();
string appidMSG = string.Format("Initializing client with appid: {0}", AppID);
logger.UpdateLog(appidMSG);
client.LoadEngine(AppID); // load engine
// Set up the texture for rendering POV as a texture
if (VirtualCam.isActiveAndEnabled)
{
logger.UpdateLog("Virtual Camera is Active and Enabled, Enable custom video source");
client.CustomVideo = true;
int width = Screen.width;
int height = Screen.height;
}
AddCallbackEvents(); // add custom event handling
if (TokenServerURL != "")
{
client.JoinWithTokenServer(ChannelName, UID, TokenServerURL);
}
else
{
// joining with or without a token
client.Join(ChannelName, TempToken, UID);
string joiningChannelMsg = string.Format("Joining channel: {0}, with uid: {1}", ChannelName, UID);
logger.UpdateLog(joiningChannelMsg);
}
}
public void LeaveChannel()
{
if (client != null)
{
client.Leave();
}
DisableSharing();
InChannel = false;
// change mic button text and color - helps the user see they left the channel
if (MicBtn != null)
{
MicBtn.GetComponentInChildren<Text>().text = "MIC";
MicBtn.GetComponent<Image>().color = Color.white;
}
// remove the remote video planes
if (gameObject.activeInHierarchy)
{
if (RemoteVideoRoot?.transform.childCount > 0)
{
foreach (Transform child in RemoteVideoRoot.transform)
{
GameObject.Destroy(child.gameObject);
}
StartCoroutine(UiUpdate(0.5f));
}
}
}
public void ToggleMic()
{
if (!InChannel)
return; // only toggle mic when in a channel
Text MicBtnText = MicBtn.GetComponentInChildren<Text>();
Image micBtnImg = MicBtn.GetComponent<Image>();
if (micBtnImg.color == Color.green)
{
client.MuteLocalAudioStream(true);
MicBtnText.text = "Mic OFF";
micBtnImg.color = DisabledMicColor;
}
else if (micBtnImg.color == Color.red)
{
client.MuteLocalAudioStream(false);
MicBtnText.text = "Mic ON";
micBtnImg.color = ActiveMicColor;
}
else
{
client.MuteLocalAudioStream(true); // mute by default
MicBtnText.text = "- MUTED -";
MicBtnText.color = Color.white;
micBtnImg.color = DisabledMicColor;
}
}
// Called by quit button
public void ExitApp()
{
#if UNITY_EDITOR
// Application.Quit() does not work in the editor so
// UnityEditor.EditorApplication.isPlaying need to be set to false to end the game
UnityEditor.EditorApplication.isPlaying = false;
#else
Application.Quit();
#endif
}
#endregion
#region --- Callback handlers ---
protected virtual void AddCallbackEvents()
{
IRtcEngine mRtcEngine = IRtcEngine.QueryEngine();
mRtcEngine.OnJoinChannelSuccess += OnJoinChannelSuccess;
mRtcEngine.OnUserJoined += OnUserJoined;
mRtcEngine.OnUserOffline += OnUserOffline;
}
public void OnJoinChannelSuccess(string channelName, uint uid, int elapsed)
{
InChannel = true;
if (VirtualCam != null && VirtualCam.isActiveAndEnabled)
{
logger.UpdateLog("Enable Virtual Camera Sharing");
EnableVirtualCameraSharing();
}
else
{
logger.UpdateLog("ERROR: Failed to find perspective camera.");
}
// update mic button color and text - visually show joined channel
if (MicBtn != null)
{
MicBtn.GetComponentInChildren<Text>().text = "MIC ON";
MicBtn.GetComponent<Image>().color = ActiveMicColor;
}
// enable dual stream mode
IRtcEngine mRtcEngine = IRtcEngine.QueryEngine();
mRtcEngine.EnableDualStreamMode(true);
}
public void OnUserJoined(uint uid, int elapsed)
{
// add video streams from all users in the channel
// offset the new video plane based on the parent's number of children.
//float xOffset = RemoteVideoRoot.transform.childCount * 3.5f;
//MakeVideoView(uid, RemoteVideoRoot, new Vector3(xOffset, 0, 0), Quaternion.Euler(270, 0, 0));
// to restrict which user video streams appear,
// only show users with uid 1000-1009 or 49024 (screen share).
// uid 49024 is an arbitrary number that was selected and hardcoded as the uid for the screen share stream in the web demo code. This uid can be customized
string remoteUIDtype;
if (uid >= 1000 && uid <= 1009)
{
// offset the new video plane based on the parent's number of children.
float xOffset = RemoteVideoRoot.transform.childCount * -3.69f;
MakeVideoView(uid, RemoteVideoRoot, new Vector3(xOffset, 0, 0), Quaternion.Euler(270, 180, 0), new Vector3(1.0f, 1.0f, 0.5625f));
remoteUIDtype = "admin";
} else if (uid == 49024 && RemoteScreenVideoRoot != null)
{
MakeVideoView(uid, RemoteScreenVideoRoot, new Vector3(0, 0, 0), Quaternion.Euler(270, 0, 0), new Vector3(-1.777f,-1.0f, -1.0f));
remoteUIDtype = "screen";
}
else
{
IRtcEngine mRtcEngine = IRtcEngine.QueryEngine();
// unsubscribe from video & audio streams
mRtcEngine.MuteRemoteVideoStream(uid, true);
mRtcEngine.MuteRemoteAudioStream(uid, true);
remoteUIDtype = "peer";
}
// keep track of the remote uids
logger.UpdateLog($"Make Remote Video UID type:{remoteUIDtype}");
if (RemoteUIDs.ContainsKey(remoteUIDtype))
{
RemoteUIDs[remoteUIDtype].Add(uid);
} else {
RemoteUIDs.Add(remoteUIDtype, new List<uint> { uid });
}
}
public void OnUserOffline(uint uid, USER_OFFLINE_REASON reason)
{
logger.UpdateLog("onUserOffline: update UI");
// update the position of the remaining children
StartCoroutine(UiUpdate(0.5f));
}
#endregion
#region --- misc helper functions ---
public void SetResolution(VideoDimensions newDimensions, int newBitrate)
{
dimensions = newDimensions;
bitrate = newBitrate;
VideoEncoderConfiguration videoEncodeConfig = new VideoEncoderConfiguration
{
dimensions = this.dimensions,
frameRate = this.frameRate,
bitrate = this.bitrate,
orientationMode = ORIENTATION_MODE.ORIENTATION_MODE_FIXED_LANDSCAPE,
mirrorMode = this.mirrorMode
};
client.SetVideoEncoderConfig(videoEncodeConfig);
}
private void CheckAppId()
{
logger = new Logger(LogText);
logger.DebugAssert(AppID.Length > 10, "Please fill in your AppId"); // Checks that AppID is set.
}
private void MakeVideoView(uint uid, GameObject parentNode, Vector3 position, Quaternion rotation, Vector3 scale)
{
logger.UpdateLog(string.Format("Make Remote Video View for UID: {0}.", uid));
GameObject go = GameObject.Find(uid.ToString());
if (go != null)
{
return; // reuse
}
// create a GameObject and assign to this new user
VideoSurface videoSurface = makePlaneSurface(uid.ToString(), parentNode, position, rotation, scale);
if (videoSurface != null)
{
// configure videoSurface
videoSurface.SetForUser(uid);
videoSurface.SetEnable(true);
videoSurface.SetVideoSurfaceType(AgoraVideoSurfaceType.Renderer);
videoSurface.SetGameFps(30);
}
}
// VIDEO TYPE 1: 3D Object
public VideoSurface makePlaneSurface(string goName, GameObject parentNode, Vector3 position, Quaternion rotation, Vector3 scale)
{
GameObject go = GameObject.CreatePrimitive(PrimitiveType.Plane);
if (go == null)
{
return null;
}
go.name = goName;
go.transform.localScale = scale; // scale the video (4:3)
if (parentNode != null)
{
go.transform.parent = parentNode.transform;
go.transform.localPosition = position;
go.transform.localRotation = rotation;
Debug.Log("add video view");
}
else
{
Debug.Log("parentNode is null video view");
go.transform.localPosition = new Vector3(0, 0, 0f);
go.transform.localRotation = Quaternion.Euler(270, 0, 0);
}
// configure videoSurface
VideoSurface videoSurface = go.AddComponent<VideoSurface>();
return videoSurface;
}
IEnumerator UiUpdate(float time)
{
yield return new WaitForSeconds(time);
// update the UI
for (int i = 0; i < RemoteVideoRoot.transform.childCount; i++)
{
float xOffset = -1 * i * 3.69f; // calculate the new position
RemoteVideoRoot.transform.GetChild(i).localPosition = new Vector3(xOffset, 0, 0); // update the position
}
}
#endregion
#region --- Virtual Camera video frame sharing ---
void EnableVirtualCameraSharing()
{
RenderTexture renderTexture = VirtualCam.targetTexture;
if (renderTexture != null)
{
BufferTexture = new Texture2D(renderTexture.width, renderTexture.height, ConvertFormat, false);
StartCoroutine(CoShareRenderData()); // use co-routine to push frames into the Agora stream
} else
{
logger.UpdateLog("Error: No Render Texture Found. Check Virtual Camera.");
}
}
void DisableSharing()
{
BufferTexture = null;
}
IEnumerator CoShareRenderData()
{
while (ShareCameraMode == 1)
{
yield return new WaitForEndOfFrame();
ShareRenderTexture();
}
yield return null;
}
private void ShareRenderTexture()
{
if (BufferTexture == null) // offlined
{
return;
}
Camera targetCamera = VirtualCam; // AR Camera
RenderTexture.active = targetCamera.targetTexture; // the targetTexture holds render texture
Rect rect = new Rect(0, 0, targetCamera.targetTexture.width, targetCamera.targetTexture.height);
BufferTexture.ReadPixels(rect, 0, 0);
BufferTexture.Apply();
byte[] bytes = BufferTexture.GetRawTextureData();
// sends the Raw data contained in bytes
//monoProxy.StartCoroutine(PushFrame(bytes, (int)rect.width, (int)rect.height,
//() =>
//{
// bytes = null;
//}));
StartCoroutine(PushFrame(bytes, (int)rect.width, (int)rect.height,
() =>
{
bytes = null;
}));
RenderTexture.active = null;
}
/// <summary>
/// Push frame to the remote client. This is the same code that does ScreenSharing.
/// </summary>
/// <param name="bytes">raw video image data</param>
/// <param name="width"></param>
/// <param name="height"></param>
/// <param name="onFinish">callback upon finish of the function</param>
/// <returns></returns>
IEnumerator PushFrame(byte[] bytes, int width, int height, System.Action onFinish)
{
if (bytes == null || bytes.Length == 0)
{
Debug.LogError("Zero bytes found!!!!");
yield break;
}
IRtcEngine rtc = IRtcEngine.QueryEngine();
//if the engine is present
if (rtc != null)
{
//Create a new external video frame
ExternalVideoFrame externalVideoFrame = new ExternalVideoFrame();
//Set the buffer type of the video frame
externalVideoFrame.type = ExternalVideoFrame.VIDEO_BUFFER_TYPE.VIDEO_BUFFER_RAW_DATA;
// Set the video pixel format
externalVideoFrame.format = PixelFormat; // VIDEO_PIXEL_RGBA
//apply raw data you are pulling from the rectangle you created earlier to the video frame
externalVideoFrame.buffer = bytes;
//Set the width of the video frame (in pixels)
externalVideoFrame.stride = width;
//Set the height of the video frame
externalVideoFrame.height = height;
//Remove pixels from the sides of the frame
externalVideoFrame.cropLeft = 10;
externalVideoFrame.cropTop = 10;
externalVideoFrame.cropRight = 10;
externalVideoFrame.cropBottom = 10;
//Rotate the video frame (0, 90, 180, or 270)
externalVideoFrame.rotation = 180;
// increment i with the video timestamp
//externalVideoFrame.timestamp = System.DateTime.Now.Ticks;
externalVideoFrame.timestamp = timeStampCount++;
//Push the external video frame with the frame we just created
int a = rtc.PushVideoFrame(externalVideoFrame);
if (timeStampCount % 100 == 0) Debug.Log(" pushVideoFrame(" + timeStampCount + ") size:" + bytes.Length + " => " + a);
}
yield return null;
onFinish();
}
#endregion
}
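Note that EnableVirtualCameraSharing() above only starts the frame-pushing coroutine when the virtual camera has a RenderTexture assigned; otherwise it logs "Error: No Render Texture Found." A minimal sketch of that wiring, with the texture size assumed to match the encoder dimensions (an editor's illustration, not from the original post):

// hypothetical setup, e.g. in Awake(): give VirtualCam a render target to read from
RenderTexture rt = new RenderTexture(1280, 720, 24); // width, height, depth-buffer bits
VirtualCam.targetTexture = rt;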

STM32 I2S with DMA playing slow

I want to implement a WAV/MP3 player (for now, let's just say WAV) with an STM32. It reads the file from the SD card with FatFs, then transfers it to the I2S buffer with DMA.
The problem is that when I plug in my speakers, it plays the song at the correct pitch (tone) but very slowly, and it also makes a strange repetitive tick. It is as if the buffer were filled slowly, or played slowly, but with the correct frequencies.
I use an STM32_F4VE board (STM32F407VET6) with a PCM5102 (it only needs DATA, BCK at 32×fs for 16 bits, and LRCK at fs).
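For example, a 16-bit stereo stream at fs = 44.1 kHz implies LRCK = 44,100 Hz and BCK = 32 × 44,100 = 1,411,200 Hz (16 bits × 2 channels per LRCK frame), so the I2S peripheral's clock configuration has to match the sample rate reported in the WAV header.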
Here I attach a few code parts:
main.c
if((res = f_mount(&SDFatFS, SDPath, 1)) == FR_OK){ //If the SD card was mounted correctly
HAL_UART_Transmit_IT(&huart1, (uint8_t *)"SD mounted correctly\n\r", strlen("SD mounted correctly\n\r"));
if(wavPlayer_fileSelect("test.wav") == 0){ //If the WAV file wasn't opened correctly
HAL_UART_Transmit_IT(&huart1, (uint8_t *)"Error opening the WAV\n\r", strlen("Error opening the WAV\n\r"));
}
else
{
HAL_UART_Transmit_IT(&huart1, (uint8_t *)"WAV opened\n\r", strlen("WAV opened\n\r"));
wavPlayer_play();
HAL_UART_Transmit_IT(&huart1, (uint8_t *)"WAV PLAY\n\r", strlen("WAV PLAY\n\r"));
isPlaying = true;
}
}
else
{
HAL_UART_Transmit_IT(&huart1, (uint8_t *)"Error mounting the SD\n\r", strlen("Error mounting the SD\n\r"));
}
wavplayer.c
/**
* @brief Select WAV file to play
* @retval returns true when the file is found on the drive
*/
bool wavPlayer_fileSelect(const char* filePath)
{
UINT readBytes = 0;
//Open WAV file
if(f_open(&wavFile, filePath, FA_READ) != FR_OK)
{
return false;
}
//Read WAV file Header
f_read(&wavFile, &wavHeader, sizeof(wavHeader), &readBytes);
sprintf(UART_buff, "File size: %d\n\rSample rate: %d\n\r", wavHeader.FileSize, wavHeader.SampleRate);
HAL_UART_Transmit_IT(&huart1, (uint8_t *)UART_buff, strlen(UART_buff)); //TX function
end_of_file_reached = false;
return true;
}
/**
* @brief WAV File Play
*/
void wavPlayer_play(void)
{
isFinished = false;
//Read audio data from the SD card
f_lseek(&wavFile, 0);
f_read (&wavFile, &audioBuffer[0], AUDIO_BUFFER_SIZE, &playerReadBytes);
audioRemainSize = wavHeader.FileSize - playerReadBytes;
//Start playing the WAV
HAL_I2S_Transmit_DMA(&hi2s2, (uint16_t *)&audioBuffer[0], AUDIO_BUFFER_SIZE);
}
/**
* @brief Half/Full transfer Audio callback for buffer management
*/
void HAL_I2S_TxCpltCallback(I2S_HandleTypeDef *hi2s)
{
if(hi2s->Instance == SPI2)
{
if(end_of_file_reached){
return;
}
res = f_read (&wavFile, &audioBuffer[AUDIO_BUFFER_SIZE/2], AUDIO_BUFFER_SIZE/2, &playerReadBytes);
if(audioRemainSize > (AUDIO_BUFFER_SIZE / 2))
{
audioRemainSize -= playerReadBytes;
}
else
{
audioRemainSize = 0;
end_of_file_reached = true;
}
}
}
void HAL_I2S_TxHalfCpltCallback(I2S_HandleTypeDef *hi2s)
{
if(hi2s->Instance == SPI2)
{
if(end_of_file_reached){
return;
}
res = f_read (&wavFile, &audioBuffer[0], AUDIO_BUFFER_SIZE/2, &playerReadBytes);
if(audioRemainSize > (AUDIO_BUFFER_SIZE / 2))
{
audioRemainSize -= playerReadBytes;
}
else
{
audioRemainSize = 0;
end_of_file_reached = true;
}
}
}
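One thing worth double-checking against the HAL reference (an editor's note, not part of the original post): for 16-bit audio, the Size argument of HAL_I2S_Transmit_DMA counts 16-bit samples rather than bytes, so if audioBuffer is a byte array the call would look like:

HAL_I2S_Transmit_DMA(&hi2s2, (uint16_t *)&audioBuffer[0], AUDIO_BUFFER_SIZE / 2); /* Size in 16-bit samples */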

Unity TCP server freezing

I'm trying to get a TCP server working in Unity, and it's kind of working. My system's purpose is to read data from another program that sends me bytes, and to draw a picture from that data. The main problem is that when I run it, it works for a while (a random amount of time) and then the whole Unity editor freezes and I need to kill it from the Task Manager.
void Start()
{
Display1 = gameObject.GetComponent<Renderer>();
mymat = GetComponent<Renderer>().material;
packetReady = false;
tcpListenerThread = new Thread(new ThreadStart(ListenForIncommingRequests));
tcpListenerThread.IsBackground = true;
tcpListenerThread.Start();
bytes = new byte[1024];
tex = new Texture2D(800, 1280, TextureFormat.RGB24, false);
firstTime = true;
ArrayInit(3072060);
packetLength = 3072060;
myThread = new Thread(Draw);
myThread.Start();
}
void Draw()
{
if (Loader == null)
{
return;
}
else if (packetReady)
{
tex.LoadRawTextureData(Loader);
tex.Apply();
mymat.SetTexture("_EmissionMap", tex);
Display1.material.mainTexture = tex;
packetReady = false;
}
}
void Update()
{
Draw();
}
private void ListenForIncommingRequests()
{
try
{
tcpListener = new TcpListener(IPAddress.Parse("127.0.0.1"), 35800);
tcpListener.Start();
while (true)
{
if (!tcpListener.Pending())
{
Thread.Sleep(100);
}
Thread.Sleep(10);
using (connectedTcpClient = tcpListener.AcceptTcpClient())
{
using (NetworkStream stream = connectedTcpClient.GetStream())
{
int length;
while ((length = stream.Read(bytes, 0, bytes.Length)) != 0)
{
if (bytes == null)
{
return;
}
else
{
ParseData(bytes);
}
}
}
}
}
}
catch (SocketException socketException)
{
Debug.Log("SocketException " + socketException.ToString());
}
finally
{
tcpListener.Stop();
}
}
I have no idea what is causing this; I've tried to solve the problem, but nothing seems to work. Any suggestions?
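For what it's worth, here is a minimal sketch (an editor's illustration, not the original code) of the handoff the Draw()/packetReady pair above seems to be aiming for: the listener thread only stores bytes under a lock, and the main thread consumes them in Update(), because Unity APIs such as Texture2D.LoadRawTextureData may only be called from the main thread:

private readonly object frameLock = new object();
private byte[] pendingFrame; // written by the listener thread, read by Update()

// called from the listener thread instead of touching any Unity objects
void OnFrameReceived(byte[] frame)
{
    lock (frameLock) { pendingFrame = frame; }
}

// Unity main thread
void Update()
{
    byte[] frame;
    lock (frameLock) { frame = pendingFrame; pendingFrame = null; }
    if (frame != null)
    {
        tex.LoadRawTextureData(frame);
        tex.Apply();
        mymat.SetTexture("_EmissionMap", tex);
    }
}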

How to capture video from a web camera using Unity?

I am trying to capture video from the web camera using Unity and HoloLens.
I found this example on the Unity page here.
I am pasting the code below. The light on the cam turns on; however, it doesn't record.
VideoCapture.CreateAsync doesn't create a VideoCapture, so the delegate there is never executed.
I saw this thread as well. In the Player Settings, the WebCam and Microphone capabilities are on.
What could be the problem?
using UnityEngine;
using System.Collections;
using System.Linq;
using UnityEngine.XR.WSA.WebCam;
public class VideoCaptureExample : MonoBehaviour
{
static readonly float MaxRecordingTime = 5.0f;
VideoCapture m_VideoCapture = null;
float m_stopRecordingTimer = float.MaxValue;
// Use this for initialization
void Start()
{
StartVideoCaptureTest();
Debug.Log("Start");
}
void Update()
{
if (m_VideoCapture == null || !m_VideoCapture.IsRecording)
{
return;
}
if (Time.time > m_stopRecordingTimer)
{
m_VideoCapture.StopRecordingAsync(OnStoppedRecordingVideo);
}
}
void StartVideoCaptureTest()
{
Resolution cameraResolution = VideoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
Debug.Log(cameraResolution);
float cameraFramerate = VideoCapture.GetSupportedFrameRatesForResolution(cameraResolution).OrderByDescending((fps) => fps).First();
Debug.Log(cameraFramerate);
VideoCapture.CreateAsync(false, delegate (VideoCapture videoCapture)
{
Debug.Log("NULL");
if (videoCapture != null)
{
m_VideoCapture = videoCapture;
Debug.Log("Created VideoCapture Instance!");
CameraParameters cameraParameters = new CameraParameters();
cameraParameters.hologramOpacity = 0.0f;
cameraParameters.frameRate = cameraFramerate;
cameraParameters.cameraResolutionWidth = cameraResolution.width;
cameraParameters.cameraResolutionHeight = cameraResolution.height;
cameraParameters.pixelFormat = CapturePixelFormat.BGRA32;
m_VideoCapture.StartVideoModeAsync(cameraParameters,
VideoCapture.AudioState.ApplicationAndMicAudio,
OnStartedVideoCaptureMode);
}
else
{
Debug.LogError("Failed to create VideoCapture Instance!");
}
});
}
void OnStartedVideoCaptureMode(VideoCapture.VideoCaptureResult result)
{
Debug.Log("Started Video Capture Mode!");
string timeStamp = Time.time.ToString().Replace(".", "").Replace(":", "");
string filename = string.Format("TestVideo_{0}.mp4", timeStamp);
string filepath = System.IO.Path.Combine(Application.persistentDataPath, filename);
filepath = filepath.Replace("/", @"\");
m_VideoCapture.StartRecordingAsync(filepath, OnStartedRecordingVideo);
}
void OnStoppedVideoCaptureMode(VideoCapture.VideoCaptureResult result)
{
Debug.Log("Stopped Video Capture Mode!");
}
void OnStartedRecordingVideo(VideoCapture.VideoCaptureResult result)
{
Debug.Log("Started Recording Video!");
m_stopRecordingTimer = Time.time + MaxRecordingTime;
}
void OnStoppedRecordingVideo(VideoCapture.VideoCaptureResult result)
{
Debug.Log("Stopped Recording Video!");
m_VideoCapture.StopVideoModeAsync(OnStoppedVideoCaptureMode);
}
}
EDIT:
The problem was that the API doesn't work on the Emulator.
You should try taking a look at this thread here, which goes into detail on how to record a video with the HoloLens, as well as how to take a photo. Also make sure you have the WebCam and Microphone capabilities set. If you are trying to save the video, make sure you have the Videos Library capability as well.
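In a Unity UWP build, those checkboxes end up as entries in the generated Package.appxmanifest. For reference, the relevant section looks roughly like this (a sketch of what Unity generates, not something you normally edit by hand):

<Capabilities>
  <uap:Capability Name="videosLibrary" />
  <DeviceCapability Name="webcam" />
  <DeviceCapability Name="microphone" />
</Capabilities>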
OnVideoCaptureCreated:
void OnVideoCaptureCreated (VideoCapture videoCapture)
{
if (videoCapture != null)
{
m_VideoCapture = videoCapture;
Resolution cameraResolution = VideoCapture.SupportedResolutions.OrderByDescending((res) => res.width * res.height).First();
float cameraFramerate = VideoCapture.GetSupportedFrameRatesForResolution(cameraResolution).OrderByDescending((fps) => fps).First();
CameraParameters cameraParameters = new CameraParameters();
cameraParameters.hologramOpacity = 0.0f;
cameraParameters.frameRate = cameraFramerate;
cameraParameters.cameraResolutionWidth = cameraResolution.width;
cameraParameters.cameraResolutionHeight = cameraResolution.height;
cameraParameters.pixelFormat = CapturePixelFormat.BGRA32;
m_VideoCapture.StartVideoModeAsync(cameraParameters,
VideoCapture.AudioState.None,
OnStartedVideoCaptureMode);
}
else
{
Debug.LogError("Failed to create VideoCapture Instance!");
}
}
OnStartVideoCaptureMode:
void OnStartedVideoCaptureMode(VideoCapture.VideoCaptureResult result)
{
if (result.success)
{
string filename = string.Format("MyVideo_{0}.mp4", Time.time);
string filepath = System.IO.Path.Combine(Application.persistentDataPath, filename);
m_VideoCapture.StartRecordingAsync(filepath, OnStartedRecordingVideo);
}
}
OnStartRecordingVideo:
void OnStartedRecordingVideo(VideoCapture.VideoCaptureResult result)
{
Debug.Log("Started Recording Video!");
// We will stop the video from recording via other input such as a timer or a tap, etc.
}
StopRecordingVideo:
// The user has indicated to stop recording
void StopRecordingVideo()
{
m_VideoCapture.StopRecordingAsync(OnStoppedRecordingVideo);
}
OnStopRecordingVideo:
void OnStoppedRecordingVideo(VideoCapture.VideoCaptureResult result)
{
Debug.Log("Stopped Recording Video!");
m_VideoCapture.StopVideoModeAsync(OnStoppedVideoCaptureMode);
}
void OnStoppedVideoCaptureMode(VideoCapture.VideoCaptureResult result)
{
m_VideoCapture.Dispose();
m_VideoCapture = null;
}

Upload two images (photo and signature) to the database from one page using C#

There is a page where I have to upload and view a photo and a signature (a snapshot of the page is attached). Whenever I click the view button, the path to the image gets cleared, and the binary format of the image is saved as something like 0x (only that much is saved in the database, nothing else). The second thing is that both the photo and the signature should be viewable individually without saving them to the database. Please suggest a solution for this.
code
protected void pichck()
{
    string picextension = System.IO.Path.GetExtension(imgflup.PostedFile.FileName.ToString()).ToLower();
    fs = imgflup.PostedFile.InputStream;
    BinaryReader br = new BinaryReader(fs);
    picbyte = br.ReadBytes((Int32)fs.Length);
    byte[] picarray = new byte[] { 255, 216, 255 }; // JPEG magic bytes
    //Boolean match = true;
    //int i;
    //for (i = 0; i <= picarray.Length - 1; i = i + 1)
    //{
    //    if (picarray[i] != picbyte[i])
    //    {
    //        match = false;
    //        break;
    //    }
    //}
    //if (match == true)
    //{
    //}
    if (imgflup.HasFile == true)
    {
        if (picextension == ".jpg" || picextension == ".jpeg")
        {
            if (imgflup.PostedFile.ContentLength < 20000 || imgflup.PostedFile.ContentLength > 50000)
            {
                Response.Write("<script>alert('the file should be of size 20 KB to 50 KB')</script>");
                check();
                return;
            }
            else
            {
                string base64String = Convert.ToBase64String(picbyte, 0, picbyte.Length);
                picimg.ImageUrl = "data:image/jpeg;base64," + base64String;
                Label1.Visible = true;
                Label1.Text = imgflup.PostedFile.FileName;
            }
        }
        else
        {
            Response.Write("<script>alert('the file should be of jpg or jpeg format')</script>");
            check();
            return;
        }
    }
    else
    {
        Response.Write("<script>alert('Please Upload a file')</script>");
        check();
        return;
    }
}
protected void signchckt()
{
    string signextension = System.IO.Path.GetExtension(signflup.PostedFile.FileName.ToString()).ToLower();
    fs1 = signflup.PostedFile.InputStream;
    BinaryReader br1 = new BinaryReader(fs1);
    signbyte = br1.ReadBytes((Int32)fs1.Length);
    //Boolean match = true;
    //int i;
    //byte[] signarray = new byte[] { 255, 216, 255 };
    //for (i = 0; i <= signarray.Length - 1; i = i + 1)
    //{
    //    if (signarray[i] != signbyte[i])
    //    {
    //        match = false;
    //        break;
    //    }
    //}
    //if (match == true)
    //{
    //}
    if (signflup.HasFile == true)
    {
        if (signextension == ".jpg" || signextension == ".jpeg")
        {
            if (signflup.PostedFile.ContentLength < 20000 || signflup.PostedFile.ContentLength > 50000)
            {
                Response.Write("<script>alert('the file should be of size 20 KB to 50 KB')</script>");
                check1();
                return;
            }
            else
            {
                string base64String = Convert.ToBase64String(signbyte, 0, signbyte.Length);
                signimg.ImageUrl = "data:image/jpeg;base64," + base64String;
                Label2.Visible = true;
                Label2.Text = signflup.PostedFile.FileName;
            }
        }
        else
        {
            Response.Write("<script>alert('The signature should be of jpg or jpeg type')</script>");
            check1();
            return;
        }
    }
    else
    {
        Response.Write("<script>alert('Please Upload a file')</script>");
        check1();
        return;
    }
}
protected void imgviewbtn_Click(object sender, EventArgs e)
{
    pichck();
}
protected void signviewbtn_Click(object sender, EventArgs e)
{
    signchckt();
}
protected void updtbtn_Click(object sender, EventArgs e)
{
    if (Session["ID"] == null || Session["ID"].ToString() == "")
    {
        Response.Write("<script>alert('There is no Application Id')</script>");
        Response.Redirect("~/home.aspx");
    }
    else
    {
        Int64 aplicationid = Convert.ToInt64(Session["ID"].ToString());
        using (obj.con)
        {
            pichck();
            signchckt();
            obj.con.Open();
            obj.cmd = new SqlCommand("spPhotoandsignature", obj.con);
            obj.cmd.CommandType = System.Data.CommandType.StoredProcedure;
            obj.cmd.Parameters.AddWithValue("@Application_Id", aplicationid);
            obj.cmd.Parameters.AddWithValue("@Pic_Scan", SqlDbType.Binary).Value = picbyte;
            obj.cmd.Parameters.AddWithValue("@Pic_Size", fs);
            obj.cmd.Parameters.AddWithValue("@Sign_Scan", SqlDbType.Binary).Value = signbyte;
            obj.cmd.Parameters.AddWithValue("@Sign_Size", fs1);
            obj.cmd.Parameters.AddWithValue("@Type", System.IO.Path.GetExtension(imgflup.PostedFile.FileName.ToString()).ToLower());
            int a = obj.cmd.ExecuteNonQuery();
            if (a == 1)
            {
                Response.Write("<script>alert('your data Submitted successfully')</script>");
                Response.Redirect("~/Report.aspx");
            }
        }
    }
}
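For the database side, a more explicit alternative to AddWithValue is a typed varbinary parameter. This is an editor's sketch (parameter names taken from the stored-procedure call above, and the *_Size parameters assumed to be byte counts), not the poster's code:

// hypothetical rewrite of the parameter binding above
obj.cmd.Parameters.Add("@Pic_Scan", SqlDbType.VarBinary, -1).Value = picbyte;  // -1 = varbinary(max)
obj.cmd.Parameters.Add("@Pic_Size", SqlDbType.Int).Value = picbyte.Length;     // size in bytes, not the stream object
obj.cmd.Parameters.Add("@Sign_Scan", SqlDbType.VarBinary, -1).Value = signbyte;
obj.cmd.Parameters.Add("@Sign_Size", SqlDbType.Int).Value = signbyte.Length;

AddWithValue has to infer a SQL type from the .NET value it is given, and a Stream like fs has no SQL mapping; Parameters.Add with an explicit SqlDbType sends exactly the bytes you supply.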
[screenshot: problempage.png]