Process microphone audio samples via the Web Audio API - web-audio-api

I am trying to get the audio samples of my microphone via Web Audio using this code:
<!doctype html>
<html>
<meta charset="utf-8">
<body>
  <script>
    function start() {
      let button = document.getElementById("start");
      button.disabled = true;
      let audioCtx = new (window.AudioContext || window.webkitAudioContext)();
      navigator.mediaDevices.getUserMedia({
        audio: {
          echoCancellation: false,
          noiseSuppression: false,
          autoGainControl: false,
        }
      }).then(function (stream) {
        let audioSource = audioCtx.createMediaStreamSource(stream);
        let scriptNode = audioCtx.createScriptProcessor(4096, 1, 0);
        scriptNode.onaudioprocess = function (audioProcessingEvent) {
          console.log('foo!');
        };
        audioSource.connect(scriptNode);
      }).catch(function (err) {
        console.log('Error initializing user media stream: ' + err);
      });
    }
  </script>
  <button id="start" onclick="start()">Start</button>
</body>
</html>
For whatever reason this does not constantly output "foo!" in the console. What am I missing?
EDIT: It works in Firefox but not Chrome. Confusing...

Adding scriptNode.connect(audioCtx.destination) did the trick on Chrome. Processing only starts if the script node is actually connected to some sort of output.
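For reference, here is a minimal sketch of the working chain inside the getUserMedia .then() callback. Two details are my own assumptions rather than part of the original answer: the processor is created with one output channel (a node created with zero outputs has nothing to connect to the destination), and a zero-gain GainNode sits between the processor and the destination so the microphone is not audibly played back:

let audioSource = audioCtx.createMediaStreamSource(stream);

// One output channel (assumption): with zero outputs there is nothing to connect.
let scriptNode = audioCtx.createScriptProcessor(4096, 1, 1);

scriptNode.onaudioprocess = function (audioProcessingEvent) {
  let samples = audioProcessingEvent.inputBuffer.getChannelData(0);
  console.log('foo!', samples.length);
};

// Chrome only drives the processor when it is connected to an output,
// so route it through a muted GainNode to keep the page silent.
let silence = audioCtx.createGain();
silence.gain.value = 0;

audioSource.connect(scriptNode);
scriptNode.connect(silence);
silence.connect(audioCtx.destination);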

Related

Does Ant Media Server support live 360 video streams in HLS or DASH?

I need to know if live 360 videos are supported in HLS or DASH. That feature is really important to me.
Ant Media Server supports 360 Live Video. You can use both HLS and WebRTC with 360 Live Video.
Here is a WebRTC 360 Live Video sample page:
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Ant Media Server</title>
<script src="https://aframe.io/releases/1.0.4/aframe.min.js"></script>
<script src="https://raw.githubusercontent.com/ant-media/StreamApp/master/src/main/webapp/js/webrtc_adaptor.js"></script>
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
<script src="https://code.jquery.com/jquery-3.4.1.min.js"></script>
<script>
  setTimeout(function() {
    $(".a-enter-vr-button").click();
  }, 3000);

  AFRAME.registerComponent('vr-mode-on', {
    schema: {
      tagName: { type: 'string', default: "video" }
    },
    init: function() {
      this.el.addEventListener("click", function() {
        $(".a-enter-vr-button").click();
      });
    }
  });
</script>
</head>
<body>
<a-scene inspector="https://cdn.jsdelivr.net/gh/aframevr/aframe-inspector@master/dist/aframe-inspector.min.js">
<a-box position="-10 -3 -6" rotation="45 45 45" color="red" id="play" vr-mode-on></a-box>
<video id="remoteVideo" autoplay controls playsinline></video>
<a-text position="-8 3 -8" value="Hello, World!" color="red" scale="3 3 3"></a-text>
<a-plane position="0 0 -4" rotation="-90 0 0" width="4" height="4" color="#7BC8A4"></a-plane>
<a-entity distance="0.0" video-controls="src:#video"></a-entity>
<a-camera>
<a-cursor color="#FF0000"></a-cursor>
</a-camera>
<a-videosphere src="#remoteVideo" rotation="0 180 0" style="background-color: antiquewhite"></a-videosphere>
</a-scene>
<script>
  let video;
  var playOrder = ["webrtc"];
  var name = "antmedia";
  var token = "null";
  var webRTCAdaptor = null;
  var streamsFolder = "streams";

  initializeWebRTCPlayer(name, token);

  function initializeWebRTCPlayer(name, token, noStreamCallback) {
    video = document.getElementById("remoteVideo");
    document.getElementById("remoteVideo").style.display = "block";
    var pc_config = null;
    var sdpConstraints = {
      OfferToReceiveAudio: true,
      OfferToReceiveVideo: true
    };
    var mediaConstraints = {
      video: false,
      audio: false
    };
    var appName = location.pathname.substring(0, location.pathname.lastIndexOf("/") + 1);
    var path = location.hostname + ":" + location.port + appName + "websocket";
    var websocketURL = "ws://localhost:5080/LiveApp/websocket";

    webRTCAdaptor = new WebRTCAdaptor({
      websocket_url: websocketURL,
      mediaConstraints: mediaConstraints,
      peerconnection_config: pc_config,
      sdp_constraints: sdpConstraints,
      remoteVideoId: "remoteVideo",
      isPlayMode: true,
      debug: true,
      callback: function (info, description) {
        if (info == "initialized") {
          console.log("initialized");
          webRTCAdaptor.getStreamInfo(name);
        } else if (info == "streamInformation") {
          console.log("stream information");
          webRTCAdaptor.play(name, token);
        } else if (info == "play_started") {
          // joined the stream
          console.log("play started");
          // document.getElementById("video_info").style.display = "none";
          // playWebRTCVideo();
        } else if (info == "play_finished") {
          // left the stream
          console.log("play finished");
          // check whether publishing may start again
          setTimeout(function () {
            webRTCAdaptor.getStreamInfo(name);
          }, 3000);
        } else if (info == "closed") {
          //console.log("Connection closed");
          if (typeof description != "undefined") {
            console.log("Connection closed: " + JSON.stringify(description));
          }
        }
      },
      callbackError: function (error) {
        // some of the possible errors: NotFoundError, SecurityError, PermissionDeniedError
        console.log("error callback: " + JSON.stringify(error));
      }
    });
  }
</script>
</body>
</html>
Here is an HLS 360 Live Video sample page:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<script src="https://aframe.io/releases/1.0.4/aframe.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/hls.js#latest"></script>
<script src="https://rawgit.com/oscarmarinmiro/aframe-video-controls/master/dist/aframe-video-controls.min.js"></script>
<title>Title</title>
</head>
<body>
<a-scene inspector="https://cdn.jsdelivr.net/gh/aframevr/aframe-inspector@master/dist/aframe-inspector.min.js">
<a-box position="-10 -3 -6" rotation="45 45 45" color="red" id="play"></a-box>
<video id="video" autoplay loop crossorigin="anonymous" muted controls src="http://localhost:5080/LiveApp/streams/antmedia.m3u8">
</video>
<a-text position="-8 3 -8" value="Hello, World!" color="red" scale="3 3 3"></a-text>
<a-plane position="0 0 -4" rotation="-90 0 0" width="4" height="4" color="#7BC8A4"></a-plane>
<a-entity distance="0.0" video-controls="src:#video"></a-entity>
<a-camera>
<a-cursor color="#FF0000"></a-cursor>
</a-camera>
<a-videosphere src="#video" rotation="0 180 0" style="background-color: antiquewhite"></a-videosphere>
</a-scene>
<script>
  let video = document.getElementById('video');
  if (Hls.isSupported()) {
    var hls = new Hls({
      debug: true
    });
    hls.loadSource('http://localhost:5080/LiveApp/streams/antmedia.m3u8');
    hls.attachMedia(video);
    hls.on(Hls.Events.MEDIA_ATTACHED, function() {
      video.muted = true;
      video.play();
    });
  }
  // hls.js is not supported on platforms that do not have Media Source Extensions (MSE) enabled.
  // When the browser has built-in HLS support (check using `canPlayType`), we can provide an HLS manifest (i.e. an .m3u8 URL) directly to the video element through the `src` property.
  // This uses the built-in support of the plain video element, without hls.js.
  else if (video.canPlayType('application/vnd.apple.mpegurl')) {
    video.src = 'http://localhost:5080/LiveApp/streams/antmedia.m3u8';
    video.addEventListener('canplay', function() {
      video.play();
    });
  }

  let box = document.getElementById("play");
  box.addEventListener("mouseenter", function() {
    box.setAttribute("scale", {
      x: 3,
      y: 3,
      z: 3
    });
  });
  box.addEventListener("click", function() {
    box.setAttribute("color", "blue");
    video.pause();
  });
</script>
</body>
</html>
These samples are configured with the antmedia Stream ID and the LiveApp application name. Change them according to your own Stream ID and application.
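For instance, assuming a stream ID of myStream and an application named MyApp (both hypothetical), these are roughly the values to adjust:

// WebRTC sample page:
var name = "myStream";                                      // your Stream ID
var websocketURL = "ws://your-server:5080/MyApp/websocket"; // your application name

// HLS sample page: point the <video> src and hls.loadSource() at your own stream:
hls.loadSource('http://your-server:5080/MyApp/streams/myStream.m3u8');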

Connection Failed VIDYO_CONNECTORFAILREASON_ConnectionFailed onFailure

I just wrote a basic JavaScript page with a button that joins a conference. I used the App ID, Key, and username to generate a token with the provided .jar. On the web console it says "Connection Failed VIDYO_CONNECTORFAILREASON_ConnectionFailed onFailure". Any inputs on this are much appreciated, thanks!
Below is the code:
<html>
<head><title>Video Handler</title>
<meta http-equiv="content-type" content="text/html"; charset="utf-8"/>
</head>
<body>
<script>
var vidyoConnector;
function onVidyoClientLoaded(status){
console.log("VidyoClient load state"+status.state);
if(status.state== "READY"){
VC.CreateVidyoConnector({
viewId:"renderer",
viewStyle:"VIDYO_CONNECTORVIEWSTYLE_Default",
remoteParticipants:2,
logFileFilter:"error",
logFileName:"",
userData:""
}).then(function (vc){
console.log("Create Success");
}).catch(function(error){
});
}
}//end of vidyo client
function joinCall(){
vidyoConnector.Connect({
host:"prod.vidyo.io",
token:"cHJvdmlzaW9uAGF4YjM4ODIwQHVjbW8uZWR1QDdlNjE4Yi52aWR5by5pbwA2MzcwMzA5NjQ0NAAANDA0MjQ1MmE3N2RlYzA3ZGEwYmNjNTAzYWUzMTVhZWVjNzUzNmQ1NTBiMGU2NDQ3NTY0MzMxODI5ODRkMmU3YzQ0ODBhMTI3YzlkODE5Yjk2OGFjYjY4YWViNmRlOTcw",
displayName:"Arun",
resourceId:"IntranetEngineeringConference",
onSuccess:function(){
console.log("Connected to the Conference");
},
onFailure:function(reason){
console.error("Connection Failed");},//end of OnFailure
onDisconnected: function(reason){
console.log("disconnected -"+reason);
}//end of OnDisconnect
})
}//end of videocal function
</script>
<script src="https://static.vidyo.io/latest/javascript/VidyoClient/VidyoClient.js?onload=onVidyoClientLoaded"></script>
<h3>Hello Video Test</h3>
<button onclick="joinCall()">Join Conference</button>
<div id="renderer"></div>
I believe the problem is that you never assign the created connector to the vidyoConnector variable.
Try the following code instead:
var vidyoConnector;

function onVidyoClientLoaded(status) {
  console.log("VidyoClient load state: " + status.state);
  if (status.state == "READY") {
    VC.CreateVidyoConnector({
      viewId: "renderer",
      viewStyle: "VIDYO_CONNECTORVIEWSTYLE_Default",
      remoteParticipants: 2,
      logFileFilter: "error",
      logFileName: "",
      userData: ""
    }).then(function (vc) {
      vidyoConnector = vc; // keep the created connector so joinCall() can use it
      console.log("Create Success");
    }).catch(function (error) {
      console.error("CreateVidyoConnector failed: " + error);
    });
  }
} // end of vidyo client
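As an extra sketch of my own (not part of the original answer): since CreateVidyoConnector resolves asynchronously, it can also help to guard joinCall() so Connect is never called before the connector exists.

function joinCall() {
  if (!vidyoConnector) {
    console.warn("VidyoConnector is not ready yet - try again in a moment.");
    return;
  }
  vidyoConnector.Connect({
    host: "prod.vidyo.io",
    token: "<your generated token>",   // from the .jar token generator
    displayName: "Arun",
    resourceId: "IntranetEngineeringConference",
    onSuccess: function () { console.log("Connected to the Conference"); },
    onFailure: function (reason) { console.error("Connection Failed: " + reason); },
    onDisconnected: function (reason) { console.log("Disconnected - " + reason); }
  });
}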
Let me know if this helps.

Crossrider extension to fetch new posts from a feed using the Google Feeds API

I am trying to create an extension to display all the latest posts fetched from my feed using the Google Feeds API. To implement this, I have added this code in background.js:
appAPI.ready(function() {
  // Global variable to hold the toggle state of the button
  var buttonState = true;
  // Sets the initial browser icon
  appAPI.browserAction.setResourceIcon('images/icon.png');
  // Sets the tooltip for the button
  appAPI.browserAction.setTitle('My Postreader Extension');
  appAPI.browserAction.setPopup({
    resourcePath: 'html/popup.html',
    height: 300,
    width: 300
  });
});
and in popup.html,
<!DOCTYPE html>
<html>
<head>
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <script type="text/javascript">
    function crossriderMain($) { eval(appAPI.resources.get('script.js')); }
  </script>
</head>
<body><div id="feed"></div></body>
</html>
The script.js file is:
google.load("feeds", "1");
function initialize() {
var feed = new google.feeds.Feed("http://www.xxxxx.com/feed/");
feed.setNumEntries(10);
feed.load(function(result) {
if (!result.error) {
var container = document.getElementById("feed");
for (var i = 0; i < result.feed.entries.length; i++) {
var entry = result.feed.entries[i];
var div = document.createElement("div");
var link = document.createElement('a');
link.setAttribute('href', entry.link);
link.setAttribute('name', 'myanchor');
div.appendChild(document.createTextNode(entry.title));
div.appendChild(document.createElement('br'));
div.appendChild(link);
div.appendChild(document.createElement('br'));
container.appendChild(div);
}
}
});
}
google.setOnLoadCallback(initialize);
But I am unable to get the desired result. The popup doesn't display anything; it just stays blank.
Since you are using a resource file for the popup's content, it's best to load the remote script from the crossriderMain function, as follows:
<!DOCTYPE html>
<html>
<head>
  <!-- This meta tag is relevant only for IE -->
  <meta http-equiv="X-UA-Compatible" content="IE=edge">
  <script type="text/javascript">
    function crossriderMain($) {
      appAPI.db.async.get('style-css', function(rules) {
        $('<style type="text/css">').text(rules).appendTo('head');
      });
      appAPI.request.get({
        url: 'http://www.google.com/jsapi',
        onSuccess: function(code) {
          $.globalEval(code);
          appAPI.db.async.get('script-js', function(code) {
            // runs in the context of the extension
            $.globalEval(code.replace('CONTEXT', 'EXTN'));
            // Alternatively, run in context of page DOM
            $('<script type="text/javascript">').html(code.replace('CONTEXT', 'PAGE DOM')).appendTo('head');
          });
        }
      });
    }
  </script>
</head>
<body>
  <h1>Hello World</h1>
  <div id="feed"></div>
</body>
</html>
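Note that the popup above reads the 'script-js' and 'style-css' keys from the extension database, so something has to store them there first. Here is a minimal sketch of how background.js might populate those keys; the remote URLs are hypothetical, and I'm assuming appAPI.db.async.set is available alongside the appAPI.db.async.get and appAPI.request.get calls already used above:

appAPI.ready(function() {
  // Fetch the feed-rendering script and cache it for the popup (hypothetical URL)
  appAPI.request.get({
    url: 'http://www.example.com/extension/script.js',
    onSuccess: function(code) {
      appAPI.db.async.set('script-js', code);
    }
  });
  // Do the same for the stylesheet the popup injects (hypothetical URL)
  appAPI.request.get({
    url: 'http://www.example.com/extension/style.css',
    onSuccess: function(rules) {
      appAPI.db.async.set('style-css', rules);
    }
  });
});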
[Disclaimer: I am a Crossrider employee]

Looking for working example of WP7 PhoneGap Facebook plugin for signin button

I've tried all the code at https://github.com/davejohnson/phonegap-plugin-facebook-connect
that is recommended by the PhoneGap community, but I keep running into errors trying to get it to work.
As you can see, I'm using Cordova 1.6.0, which may be the problem.
I've added the script files to my HTML page:
<script type="text/javascript" charset="utf-8" src="cordova-1.6.0.js"></script>
<script type="text/javascript" charset="utf-8" src="cdv-plugin-fb-connect.js">/script>
<script type="text/javascript" charset="utf-8" src="facebook_js_sdk.js"></script>
<script type="text/javascript" charset="utf-8" src="ChildBrowser.js"></script>
And I've added the ChildBrowserCommand.cs into the plugins directory.
I then added this to the deviceready listener with my authentic app id (the real id is not shown here):
document.addEventListener("deviceready",onDeviceReady,false);
// once the device ready event fires, you can safely do your thing! -jm
function onDeviceReady() {
//document.getElementById("welcomeMsg").innerHTML += "Cordova is ready! version=" + window.device.cordova;
console.log("onDeviceReady. You should see this message in Visual Studio's output window.");
//fb connect sign in
try {
//alert('Device is ready! Make sure you set your app_id below this alert.');
console.log('Device is ready! Make sure you set your app_id below this alert.');
FB.Cookie.setEnabled(true); // this seems to be duplicate to 'cookie: true' below, but it is IMPORTANT due to FB implementation logic.
FB.init({ appId: "311961255484993", nativeInterface: CDV.FB, cookie: true });
login();
} catch (e) {
//alert(e);
console.log("Init error: " + e);
}
};
function login() {
FB.login(
function (response) {
if (response.session) {
console.log('logged in');
} else {
console.log('not logged in');
}
},
{ scope: 'email, read_stream, read_friendlists' }
);
}
The error I get is:
Unable to locate command :: org.apache.cordova.facebook.Connect
Any help?
EDIT: I also realize it's coming from this part of cdv-plugin-fb-connect.js, but I'm not sure why:
cordova.exec(function () {
  var authResponse = JSON.parse(localStorage.getItem('cdv_fb_session') || '{"expiresIn":0}');
  if (authResponse && authResponse.expirationTime) {
    var nowTime = (new Date()).getTime();
    if (authResponse.expirationTime > nowTime) {
      // Update expires-in information
      updatedExpiresIn = Math.floor((authResponse.expirationTime - nowTime) / 1000);
      authResponse.expiresIn = updatedExpiresIn;
      localStorage.setItem('cdv_fb_session', JSON.stringify(authResponse));
      FB.Auth.setAuthResponse(authResponse, 'connected');
    }
  }
  console.log('Cordova Facebook Connect plugin initialized successfully.');
}, (fail ? fail : null), 'org.apache.cordova.facebook.Connect', 'init', [apiKey]);
},

file path is not found or fail to open/read file Phonegap

Our code always goes to the fail section. However, we have tried changing the path several times, for example:
'file:///android_asset/www/readme.txt',
'../android_asset/www/readme.txt',
'/www/readme.txt',
'readme.txt'.
[We have placed the "readme.txt" file in the www folder.]
We picked up the code from the below link.
http://docs.phonegap.com/phonegap_file_file.md.html
I asked a similar question and couldn't really find a solution. Here's a complete example of our method call:
window.resolveLocalFileSystemURI("file:///android_asset",
function(entry){
console.log(entry.fullPath);},
function(evt){
console.log(evt.code);}
);
However, during the first phase we only got an undefined error code, while in a fresh testing project we receive error code 1 (file not found, line 56).
By the way: did you realise you're missing a slash? Try referencing file:///android_asset and, if it works, let me know what you did :)
<!DOCTYPE html>
<html>
<head>
  <title>FileReader Example</title>
  <script type="text/javascript" charset="utf-8" src="cordova-1.5.0.js"></script>
  <script type="text/javascript" charset="utf-8">
    // Wait for PhoneGap to load
    function onLoad() {
      document.addEventListener("deviceready", onDeviceReady, false);
    }

    // PhoneGap is ready
    function onDeviceReady() {
      window.requestFileSystem(LocalFileSystem.PERSISTENT, 0, gotFS, fail);
    }

    function gotFS(fileSystem) {
      fileSystem.root.getFile("file:///sdcard/example.txt", {create: true}, gotFileEntry, fail);
    }

    function gotFileEntry(fileEntry) {
      fileEntry.file(gotFile, fail);
    }

    function gotFile(file) {
      readDataUrl(file);
      readAsText(file);
    }

    function readDataUrl(file) {
      var reader = new FileReader();
      reader.onloadend = function(evt) {
        console.log("Read as data URL");
        console.log(evt.target.result);
      };
      reader.readAsDataURL(file);
    }

    function readAsText(file) {
      var reader = new FileReader();
      reader.onloadend = function(evt) {
        console.log("Read as text");
        console.log(evt.target.result);
      };
      reader.readAsText(file);
    }

    function fail(evt) {
      console.log(evt.target.error.code);
    }
  </script>