I'm using FreeSWITCH 1.6 and following the cookbook to implement WebRTC. For this I downloaded SIP.js 0.7.0 as well, and I have created call.html, call.js, answer.html, and answer.js pages. My call.html, including the JS, is:
<html>
<body>
<button id="startCall">Start Call</button>
<button id="endCall">End Call</button>
<br/>
<video id="remoteVideo"></video>
<br/>
<video id="localVideo" muted="muted" width="128px" height="96px"></video>
<!--<script src="js/sip-0.7.0.min.js"></script>-->
<!--<script src="call.js"></script>-->
</body>
<HEAD>
<script src="js/sip-0.7.0.min.js"></script>
<script>
var session;
console.log('hiiiiiiiiiiii')
var endButton = document.getElementById('endCall');
endButton.addEventListener("click", function () {
session.bye();
alert("Call Ended");
}, false);
console.log('hiiiii2')
var userAgent = new SIP.UA({
uri: 'sip:anonymous@gmaruzz.org',
wsServers: ["ws://call.sia.co.in:5066"],
authorizationUser: 'anonymous',
password: 'welcome'
});
console.log('hiiii3')
var startButton = document.getElementById('startCall');
startButton.addEventListener("click", function () {
session = userAgent.invite('sip:1010@139.59.17.63', options);
alert("Call Started");
}, false);
console.log('hiiii4')
var options = {
media: {
constraints: {
audio: true,
video: true
},
render: {
remote:document.getElementById('remoteVideo'),
local: document.getElementById('localVideo')
}
}
};
</script>
</HEAD>
</html>
Please point out where I'm going wrong. Thanks in advance.
You must put the wsServers inside transportOptions, like this:
var userAgent = new SIP.UA({
    uri: 'sip:anonymous@gmaruzz.org',
    transportOptions: {
        wsServers: "ws://call.sia.co.in:5066"
    },
    authorizationUser: 'anonymous',
    password: 'welcome'
});
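With the UA constructed that way, the invite and bye from your page stay the same. A minimal sketch using the URIs and element IDs from the question (options is the media/render object already defined in call.html):
var session;

document.getElementById('startCall').addEventListener('click', function () {
    // invite() returns the session object that bye() is later called on
    session = userAgent.invite('sip:1010@139.59.17.63', options);
});

document.getElementById('endCall').addEventListener('click', function () {
    if (session) {
        session.bye();
    }
});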
I need to know if live 360 videos are supported in HLS or DASH. That feature is really important to me.
Ant Media Server supports 360 Live Video, and you can use it with both HLS and WebRTC.
Here is a WebRTC 360 Live Video sample page:
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8">
<title>Ant Media Server</title>
<script src="https://aframe.io/releases/1.0.4/aframe.min.js"></script>
<script src="https://raw.githubusercontent.com/ant-media/StreamApp/master/src/main/webapp/js/webrtc_adaptor.js"></script>
<script src="https://webrtc.github.io/adapter/adapter-latest.js"></script>
<script src="https://code.jquery.com/jquery-3.4.1.min.js"></script>
<script>
setTimeout(function(){
$(".a-enter-vr-button").click();
},3000);
AFRAME.registerComponent('vr-mode-on',{
schema:{
tagName:{type:'string',default:"video"}
},
init:function(){
this.el.addEventListener("click",function() {
$(".a-enter-vr-button").click();
});
}
});
</script>
</head>
<body>
<a-scene inspector="https://cdn.jsdelivr.net/gh/aframevr/aframe-inspector@master/dist/aframe-inspector.min.js">
<a-box position="-10 -3 -6" rotation="45 45 45" color="red" id="play" vr-mode-on></a-box>
<video id="remoteVideo" autoplay controls playsinline></video>
<a-text position="-8 3 -8" value="Hello, World!" color="red" scale="3 3 3"></a-text>
<a-plane position="0 0 -4" rotation="-90 0 0" width="4" height="4" color="#7BC8A4"></a-plane>
<a-entity distance="0.0" video-controls="src:#video"></a-entity>
<a-camera>
<a-cursor color="#FF0000"></a-cursor>
</a-camera>
<a-videosphere src="#remoteVideo" rotation="0 180 0" style="background-color: antiquewhite"></a-videosphere>
</a-scene>
<script>
let video;
var playOrder = ["webrtc"];
var name = "antmedia";
var token = "null";
var webRTCAdaptor = null;
var streamsFolder = "streams";
initializeWebRTCPlayer(name, token);
function initializeWebRTCPlayer(name, token, noStreamCallback) {
video = document.getElementById("remoteVideo");
document.getElementById("remoteVideo").style.display = "block";
var pc_config = null;
var sdpConstraints = {
OfferToReceiveAudio: true,
OfferToReceiveVideo: true
};
var mediaConstraints = {
video: false,
audio: false
};
var appName = location.pathname.substring(0, location.pathname.lastIndexOf("/") + 1);
var path = location.hostname + ":" + location.port + appName + "websocket";
var websocketURL = "ws://localhost:5080/LiveApp/websocket";
webRTCAdaptor = new WebRTCAdaptor({
websocket_url: websocketURL,
mediaConstraints: mediaConstraints,
peerconnection_config: pc_config,
sdp_constraints: sdpConstraints,
remoteVideoId: "remoteVideo",
isPlayMode: true,
debug: true,
callback: function (info, description) {
if (info == "initialized") {
console.log("initialized");
webRTCAdaptor.getStreamInfo(name);
} else if (info == "streamInformation") {
console.log("stream information");
webRTCAdaptor.play(name, token);
} else if (info == "play_started") {
//joined the stream
console.log("play started");
// document.getElementById("video_info").style.display = "none";
// playWebRTCVideo();
} else if (info == "play_finished") {
//leaved the stream
console.log("play finished");
//check that publish may start again
setTimeout(function () {
webRTCAdaptor.getStreamInfo(name);
}, 3000);
} else if (info == "closed") {
//console.log("Connection closed");
if (typeof description != "undefined") {
console.log("Connecton closed: " + JSON.stringify(description));
}
}
},
callbackError: function (error) {
//some of the possible errors, NotFoundError, SecurityError,PermissionDeniedError
console.log("error callback: " + JSON.stringify(error));
}
});
}
</script>
</body>
</html>
Here is an HLS 360 Live Video sample page:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8">
<script src="https://aframe.io/releases/1.0.4/aframe.min.js"></script>
<script src="https://cdn.jsdelivr.net/npm/hls.js#latest"></script>
<script src="https://rawgit.com/oscarmarinmiro/aframe-video-controls/master/dist/aframe-video-controls.min.js"></script>
<title>Title</title>
</head>
<body>
<a-scene inspector="https://cdn.jsdelivr.net/gh/aframevr/aframe-inspector@master/dist/aframe-inspector.min.js">
<a-box position="-10 -3 -6" rotation="45 45 45" color="red" id="play"></a-box>
<video id="video" autoplay loop crossorigin="anonymous" muted controls src="http://localhost:5080/LiveApp/streams/antmedia.m3u8">
</video>
<a-text position="-8 3 -8" value="Hello, World!" color="red" scale="3 3 3"></a-text>
<a-plane position="0 0 -4" rotation="-90 0 0" width="4" height="4" color="#7BC8A4"></a-plane>
<a-entity distance="0.0" video-controls="src:#video"></a-entity>
<a-camera>
<a-cursor color="#FF0000"></a-cursor>
</a-camera>
<a-videosphere src="#video" rotation="0 180 0" style="background-color: antiquewhite"></a-videosphere>
</a-scene>
<script>
let video = document.getElementById('video');
if(Hls.isSupported()) {
var hls = new Hls({
debug: true
});
hls.loadSource('http://localhost:5080/LiveApp/streams/antmedia.m3u8');
hls.attachMedia(video);
hls.on(Hls.Events.MEDIA_ATTACHED, function() {
video.muted = true;
video.play();
});
}
// hls.js is not supported on platforms that do not have Media Source Extensions (MSE) enabled.
// When the browser has built-in HLS support (check using `canPlayType`), we can provide an
// HLS manifest (i.e. an .m3u8 URL) directly to the video element through the `src` property.
// This uses the built-in support of the plain video element, without hls.js.
else if (video.canPlayType('application/vnd.apple.mpegurl')) {
video.src = 'http://localhost:5080/LiveApp/streams/antmedia.m3u8';
video.addEventListener('canplay',function() {
video.play();
});
}
let box = document.getElementById("play");
box.addEventListener("mouseenter",function(){
box.setAttribute("scale",{
x:3,
y:3,
z:3
})
});
box.addEventListener("click",function(){
box.setAttribute("color","blue");
video.pause();
});
</script>
</body>
</html>
These samples are configured with the antmedia Stream ID and the LiveApp application name; change them to match your own Stream ID and application, as summarized below.
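Concretely, the only values the samples above assume, and that you would replace for your own deployment, are these (shown here as plain variables for clarity; in the HLS page the URL appears inline in the video src and in hls.loadSource):
// stream and server values used by both samples above; replace with your own
var name = "antmedia";                                               // your Stream ID
var websocketURL = "ws://localhost:5080/LiveApp/websocket";          // WebRTC sample
var hlsUrl = "http://localhost:5080/LiveApp/streams/antmedia.m3u8";  // HLS sample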
I installed the plugin as mentioned in the docs, but I can't find a way to get it working.
When I run the APK on Android I get this:
Uncaught ReferenceError: facebookConnectPlugin is not defined(…)
Here are my files.
index.html
<body>
<div class="app">
<div id="fb-root"></div>
<h1>Apache Cordova</h1>
<div id="deviceready" class="blink">
<p class="event listening">Connecting to Device</p>
<p class="event received">Device is Ready</p>
</div>
</div>
<script type="text/javascript" src="cordova.js"></script>
<script type="text/javascript" src="js/index.js"></script>
</body>
<div id="fb-root"></div>
index.js
var app = {
// Application Constructor
initialize: function() {
this.bindEvents();
},
// Bind Event Listeners
//
// Bind any events that are required on startup. Common events are:
// 'load', 'deviceready', 'offline', and 'online'.
bindEvents: function() {
document.addEventListener('deviceready', this.onDeviceReady, false);
},
// deviceready Event Handler
//
// The scope of 'this' is the event. In order to call the 'receivedEvent'
// function, we must explicitly call 'app.receivedEvent(...);'
onDeviceReady: function() {
app.receivedEvent('deviceready');
},
// Update DOM on a Received Event
receivedEvent: function(id) {
var parentElement = document.getElementById(id);
var listeningElement = parentElement.querySelector('.listening');
var receivedElement = parentElement.querySelector('.received');
listeningElement.setAttribute('style', 'display:none;');
receivedElement.setAttribute('style', 'display:block;');
console.log('Received Event: ' + id);
//FB login
facebookConnectPlugin.login(
["public_profile"],
fbLoginSuccess,
function (error) { alert("" + error) }
);
var fbLoginSuccess = function (userData) {
alert("UserInfo: " + JSON.stringify(userData));
}
}
};
app.initialize();
I've had issues using the FB plugin in my app development too. Sometimes it's necessary to include the FacebookConnectPlugin.js file in your index.html, as well as the script that injects the Facebook JavaScript SDK into your app (sketched below).
Source: https://github.com/driftyco/ng-cordova/issues/446
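For reference, the standard SDK-bootstrap snippet from Facebook's docs looks roughly like this; it sits next to the #fb-root div in index.html, and YOUR_APP_ID and the API version are placeholders you'd replace with your own:
<script>
  // sketch of the standard Facebook JS SDK bootstrap; YOUR_APP_ID is a placeholder
  window.fbAsyncInit = function () {
    FB.init({
      appId:   'YOUR_APP_ID',
      xfbml:   true,
      version: 'v2.5'
    });
  };

  // async loader that injects the SDK script tag next to #fb-root
  (function (d, s, id) {
    var js, fjs = d.getElementsByTagName(s)[0];
    if (d.getElementById(id)) { return; }
    js = d.createElement(s); js.id = id;
    js.src = "https://connect.facebook.net/en_US/sdk.js";
    fjs.parentNode.insertBefore(js, fjs);
  }(document, 'script', 'facebook-jssdk'));
</script>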
Other threads about the same issue:
Cordova/Phonegap-facebook-plugin Android: facebookConnectPlugin is not defined
facebookConnectPlugin is not defined
facebookConnectPlugin is not defined (ngCordova, Ionic app)
...
I am learning how to use Redux. I would like to create a simple application with only one button. When the button is clicked I want to make a REST API call, and when the response comes back its content needs to be displayed.
What I would like to do is send a store.dispatch(CardAction.GET_CARDS) message to Redux when the user clicks the button. I do not want to call the REST API directly from the button's onClick handler.
When the answer is received I intend to do the same: send an event with store.dispatch(CardAction.UPDATE_UI) and, somewhere in the background, update Redux's state.
I hope this concept aligns with React + Redux.
I have some JavaScript code done, but some parts of it are missing. Could you please help me put the parts together?
index.jsp
<!DOCTYPE html>
<%@page session="false"%>
<%@page contentType="text/html; charset=UTF-8" pageEncoding="UTF-8" %>
<html>
<head>
<meta http-equiv="CONTENT-TYPE" content="text/html; charset=UTF-8">
<base href="${pageContext.request.contextPath}/" />
<link rel="icon" type="image/x-icon" href="public/image/favicon.ico">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/latest/css/bootstrap.min.css">
<link rel="stylesheet" href="https://maxcdn.bootstrapcdn.com/bootstrap/latest/css/bootstrap-theme.min.css">
</head>
<body>
<div id="root"></div>
<script type="text/javascript" src="bundle.js"></script>
</body>
</html>
App.js
let store = createStore(reducers);
ReactDom.render(
<Provider store={store}>
<Card/>
</Provider>,
document.getElementById('root')
);
Card.js
export default class Card extends React.Component {
render() {
return (
<div>
...
<Button onClick={() => store.dispatch(CardAction.GET_CARDS)}>rest call</Button>
</div>
)
}
}
ActionType.js
export const GET_CARDS = 'get-cards';
export const UPDATE_UI = 'update-ui';
CardAction.js
export function getCards(param1, param2) {
return createAction(ActionType.GET_CARDS, (param1, param2) => ({ value1, value2 }))
}
export function updateUi() {
return createAction(ActionType.UPDATE_UI)
}
RootReducer.js
export const reducers = (state = {}, action) => {
return action
};
RestClient.js
export default {
cardPost(param1, param2) {
const url = ...;
fetch(url, {
method: 'POST',
credentials: 'include'
})
.then(response => {
if (response.ok) {
console.info('rest response have arrived');
store.dispatch(CardAction.UPDATE_UI)
} else {
console.info('error appeared during calling rest api');
//store.dispatch(CardAction.SHOW_ERROR)
}
})
.catch(function(err) {
console.info(err + ' Url: ' + url)
})
}
}
You should never call store.dispatch() from a component. Instead, import a previously built action and let the Redux flow do the rest. The reducer shouldn't return an action; it should return a new state, without mutating the previous one. I'd suggest first getting more comfortable with the Redux basics, and then following a React-Redux-REST tutorial like this one: https://medium.com/@rajaraodv/a-guide-for-building-a-react-redux-crud-app-7fe0b8943d0f#.cnat3gbcx
[EDIT]
Here's what I'd do
// component Card.js
import React from "react";
import { connect } from "react-redux";
import { getCards } from "CardAction";

class Card extends React.Component {
    render() {
        return (
            <div>
                ...
                {/* dispatch only when the button is clicked, not on render */}
                <Button onClick={() => this.props.getCards(param1, param2)}>rest call</Button>
            </div>
        )
    }
}

// connect() injects getCards as a prop that is already bound to dispatch
export default connect(null, { getCards })(Card);
// action CardAction.js
import axios from "axios";

const receivedCards = (cards) => ({
    type: "RECEIVED_CARDS",
    cards
});

export function getCards(param1, param2) {
    // idk where you're gonna use these params btw
    // also please note that fetch() isn't supported by older browsers. Here I'm showing
    // a simple example with axios, which basically performs the same operation.
    // Feel free to adapt this example code as you want.
    // `server` is a placeholder for your API base URL.
    return function(dispatch) {
        return axios({
            url: server + "endpoint",
            timeout: 20000,
            method: 'get'
        })
        .then(function(response) {
            let cards = response.data;
            dispatch(receivedCards(cards));
        })
        .catch(function(response) {
            console.log(response.data.error);
        });
    }
}
// reducer reducer.js
const initialState = {};
export default (state = initialState, action) => {
switch(action.type) {
case "RECEIVED_CARDS":
return Object.assign({},
state,
{cards: action.cards});
default:
return state;
}
}
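One more thing the question's App.js would need for the thunk-style getCards above to actually run: the redux-thunk middleware when creating the store. A minimal sketch, assuming the reducer file above and the standard redux and redux-thunk packages:
// store setup sketch: getCards returns a function, so the store needs thunk middleware
import { createStore, applyMiddleware } from "redux";
import thunk from "redux-thunk";
import reducer from "./reducer";

const store = createStore(reducer, applyMiddleware(thunk));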
I want to display a Dojo Select inside a dijit/TooltipDialog. The items of the Select are loaded dynamically, so I want to add the Select programmatically. The content of a TooltipDialog can be an object, but the Select needs a DOM node to be held in. The code is:
<head>
<script>
require([
"dojo/parser",
"dijit/form/Select",
"dijit/TooltipDialog",
"dojo/on",
"dojo/dom",
"dojo/_base/lang",
"dijit/popup",
"dojo/data/ObjectStore",
"dojo/store/Memory",
"dojo/domReady!"
], function(parser,Select,TooltipDialog,on,dom,lang,popup, ObjectStore, Memory){
parser.parse();
var t={mySel:null};
var store = new Memory({
data: [
{ id: "foo", label: "Foo" },
{ id: "bar", label: "Bar" }
]
});
var os = new ObjectStore({ objectStore: store });
t.mySel = new Select({
store: os
}, "ttt");
var myTooltipDialog = new TooltipDialog({
content: t,
onMouseLeave: function(){
popup.close(myTooltipDialog);
}
});
on(dom.byId("mmm"),"mouseover" ,lang.hitch(this,function(e){
popup.open({
popup: myTooltipDialog,
orient: ["above-centered","above","below"],
around:dom.byId('mmm')
});
t.mySel.startup();
}));
t.mySel.on("change", function(){
console.log("my value: ", this.get("value"))
})
})
</script>
</head>
<body class="claro">
<div id="ttt" > tttt</div><br>
<div id="mmm" > tttt</div><br>
</body>
You are assigning an object to the tooltip content, not a DOM node,
so try making this change:
var myTooltipDialog = new TooltipDialog({
    content: t.mySel.domNode,
    onMouseLeave: function() {
        popup.close(myTooltipDialog);
    }
});
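Putting it together, a sketch of the corrected wiring using the same widgets and node ids from the question:
t.mySel = new Select({ store: os }, "ttt");

var myTooltipDialog = new TooltipDialog({
    content: t.mySel.domNode,   // a DOM node, not a plain object
    onMouseLeave: function () {
        popup.close(myTooltipDialog);
    }
});

on(dom.byId("mmm"), "mouseover", lang.hitch(this, function (e) {
    popup.open({
        popup: myTooltipDialog,
        orient: ["above-centered", "above", "below"],
        around: dom.byId("mmm")
    });
    t.mySel.startup();          // start the Select once the dialog is shown, as in the question
}));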
I am trying to create an extension to display the latest posts fetched from my feed using the Google Feeds API. To implement this, I have added this code to background.js:
appAPI.ready(function() {
// Global variable to hold the toggle state of the button
var buttonState = true;
// Sets the initial browser icon
appAPI.browserAction.setResourceIcon('images/icon.png');
// Sets the tooltip for the button
appAPI.browserAction.setTitle('My Postreader Extension');
appAPI.browserAction.setPopup({
resourcePath:'html/popup.html',
height: 300,
width: 300
});
});
and in popup.html,
<!DOCTYPE html><html><head><meta http-equiv="X-UA-Compatible" content="IE=edge">
<script type="text/javascript">
function crossriderMain($) {eval(appAPI.resources.get('script.js')); }</script>
</head>
<body><div id="feed"></div></body></html>
The script.js file is:
google.load("feeds", "1");
function initialize() {
var feed = new google.feeds.Feed("http://www.xxxxx.com/feed/");
feed.setNumEntries(10);
feed.load(function(result) {
if (!result.error) {
var container = document.getElementById("feed");
for (var i = 0; i < result.feed.entries.length; i++) {
var entry = result.feed.entries[i];
var div = document.createElement("div");
var link = document.createElement('a');
link.setAttribute('href', entry.link);
link.setAttribute('name', 'myanchor');
div.appendChild(document.createTextNode(entry.title));
div.appendChild(document.createElement('br'));
div.appendChild(link);
div.appendChild(document.createElement('br'));
container.appendChild(div);
}
}
});
}
google.setOnLoadCallback(initialize);
But I am unable to get the desired result. The popup doesn't display anything; it just remains blank.
Since you are using a resource file for the popup's content, it's best to load the remote script from the crossriderMain function, as follows:
<!DOCTYPE html>
<html>
<head>
<!-- This meta tag is relevant only for IE -->
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<script type="text/javascript">
function crossriderMain($) {
appAPI.db.async.get('style-css', function(rules) {
$('<style type="text/css">').text(rules).appendTo('head');
});
appAPI.request.get({
url: 'http://www.google.com/jsapi',
onSuccess: function(code) {
$.globalEval(code);
appAPI.db.async.get('script-js', function(code) {
// runs in the context of the extension
$.globalEval(code.replace('CONTEXT','EXTN'));
// Alternatively, run in context of page DOM
$('<script type="text/javascript">').html(code.replace('CONTEXT','PAGE DOM')).appendTo('head');
});
}
});
}
</script>
</head>
<body>
<h1>Hello World</h1>
<div id="feed"></div>
</body>
</html>
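Note that popup.html above reads 'script-js' and 'style-css' from the extension database, so background.js has to put them there first. A hedged sketch, assuming Crossrider's appAPI.db.async.set and appAPI.resources.get behave as described in their docs (the key names match the ones read back in popup.html):
appAPI.ready(function() {
    // store the resource files under the keys that popup.html reads back
    appAPI.db.async.set('script-js', appAPI.resources.get('script.js'));
    appAPI.db.async.set('style-css', appAPI.resources.get('style.css'));
});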
[Disclaimer: I am a Crossrider employee]