PWA Saving a photo taken without download prompt - progressive-web-apps

I am trying to build a progressive web app that uses the device camera to take pictures, but I want it to save them to the Photos gallery without prompting the user to download, the way the native camera app does. Is this even possible? I haven't found a way yet, and there isn't much information on Google about it. Would it be different if the photo was uploaded to the cloud?
Thanks

The link at https://whatwebcando.today/photos.html suggests that this may be possible.
capturer = new ImageCapture(streamVideoTrack)
Creates an image capturer out of the Media Stream Video Track.
capturer.takePhoto()
Returns a Promise resolved with the photo taken with the current settings.
capturer.setOptions(photoSettings)
Configures the photoSettings for subsequent captures; if visible, the effects of the configuration can be seen in the Track used as input.
This is the example code:
function getUserMedia(options, successCallback, failureCallback) {
  var api = navigator.getUserMedia || navigator.webkitGetUserMedia ||
    navigator.mozGetUserMedia || navigator.msGetUserMedia;
  if (api) {
    return api.bind(navigator)(options, successCallback, failureCallback);
  }
}

var theStream;

function getStream() {
  if (!navigator.getUserMedia && !navigator.webkitGetUserMedia &&
      !navigator.mozGetUserMedia && !navigator.msGetUserMedia) {
    alert('User Media API not supported.');
    return;
  }
  var constraints = {
    video: true
  };
  getUserMedia(constraints, function (stream) {
    var mediaControl = document.querySelector('video');
    if ('srcObject' in mediaControl) {
      mediaControl.srcObject = stream;
    } else if ('src' in mediaControl) {
      // Fallback for older browsers; createObjectURL(stream) is deprecated.
      mediaControl.src = (window.URL || window.webkitURL).createObjectURL(stream);
    } else if (navigator.mozGetUserMedia) {
      mediaControl.mozSrcObject = stream;
    }
    theStream = stream;
  }, function (err) {
    alert('Error: ' + err);
  });
}
function takePhoto() {
  if (!('ImageCapture' in window)) {
    alert('ImageCapture is not available');
    return;
  }
  if (!theStream) {
    alert('Grab the video stream first!');
    return;
  }
  var theImageCapturer = new ImageCapture(theStream.getVideoTracks()[0]);
  theImageCapturer.takePhoto()
    .then(blob => {
      var theImageTag = document.getElementById("imageTag");
      theImageTag.src = URL.createObjectURL(blob);
    })
    .catch(err => alert('Error: ' + err));
}
So it seems like you can take a photo... I can't see anything to indicate that you can then persist this photo in the user's storage, though. I'd love to be wrong.
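As far as I can tell, a web page cannot write directly into the device's photo gallery. The closest option I know of is the Web Share API with file support: on browsers that implement it (Chrome on Android, for instance) you can hand the captured blob to the system share sheet, and the user can pick their gallery/photos app as the target. A rough sketch building on the takePhoto() example above (navigator.canShare/share are standard calls, but whether a gallery app appears as a share target depends on the device; the anchor-download fallback is just one possible alternative):
function sharePhoto(blob) {
  // Wrap the captured blob in a File so it can be passed to navigator.share().
  var file = new File([blob], 'photo.jpg', { type: 'image/jpeg' });

  if (navigator.canShare && navigator.canShare({ files: [file] })) {
    // Opens the system share sheet; the user can choose a gallery/photos app.
    navigator.share({ files: [file], title: 'Photo' })
      .then(function () { console.log('Share completed'); })
      .catch(function (err) { console.log('Share failed: ' + err); });
  } else {
    // Fallback: offer the photo as a regular download instead.
    var a = document.createElement('a');
    a.href = URL.createObjectURL(blob);
    a.download = 'photo.jpg';
    a.click();
  }
}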

Related

What Can I Do If Music Playback Pauses upon Song Switchover on the Status Bar

In my Huawei quick app, when a user switches to another page in the app during music playback and then switches to another song from the status bar, the playback pauses. Why does this happen?
Listen for audio events on the app's home page, not only on the playback page. That way, when the user leaves the playback page, the audio events are still received and the playback logic can still be controlled.
Note: the Huawei Quick App Engine does not support calling the audio API in app.ux. Therefore, once the user exits the app, the quick app no longer receives audio event callbacks, even though the music keeps playing in the background.
The following demo has two pages: Main (the home page) and Audio. To avoid duplicated code and keep things maintainable, the audio logic is extracted into a shared JavaScript utility that each page can call.
Shared utils.js:
import audio from '#system.audio';

export default {
  listenAudio() {
    console.info("util.js listenAudio");
    audio.onplay = function () {
      console.log('audio onplay');
    }
    audio.onpause = function () {
      console.log('audio onpause');
    }
    audio.onended = function () {
      console.log('audio onended');
    }
    audio.ondurationchange = function () {
      console.log('util.js ondurationchange');
      var total = audio.duration;
      console.log('util.js ondurationchange total=' + total);
    }
    audio.ontimeupdate = function () {
      var time = audio.currentTime;
      // console.log('util.js ontimeupdate time=' + time)
    }
    audio.onprevious = function () {
      audio.cover = 'https://xx.jpg';
      audio.title = "Piano music";
      audio.artist = "Mozart";
      // Replace with the music resource link.
      audio.src = 'https://xx.mp3';
      console.log('util.js onprevious event from notification');
    }
    audio.onnext = function () {
      audio.cover = 'xx.jpg';
      audio.title = 'Pop';
      audio.artist = 'Michael Jackson';
      // Replace with the music resource link.
      audio.src = 'https://xx.mp3';
      console.log('util.js onnext event from notification');
    }
  },
  getAudioPlayState() {
    audio.getPlayState({
      success: function (data) {
        console.log(`getAudioPlayState success: state: ${data.state}, src: ${data.src},
          currentTime: ${data.currentTime}, autoplay: ${data.autoplay}, loop: ${data.loop},
          volume: ${data.volume}, muted: ${data.muted}, notificationVisible: ${data.notificationVisible}`);
      },
      fail: function (data, code) {
        console.log('getAudioPlayState fail, code=' + code);
      }
    });
  },
  startPlay() {
    audio.play();
  },
  pausePlay() {
    audio.pause();
  },
  stopPlay() {
    audio.stop();
  },
  seekProgress(len) {
    audio.currentTime = len;
  },
  setVolume(value) {
    audio.volume = value;
  },
  setMute(isMuted) {
    audio.muted = isMuted;
  },
  setLoop(isLoop) {
    audio.loop = isLoop;
  },
  setStreamType() {
    if (audio.streamType === 'music') {
      audio.streamType = 'voicecall';
    } else {
      audio.streamType = 'music';
    }
    console.error('audio.streamType =' + audio.streamType);
  },
  setTitle(title) {
    console.info('setTitle=' + title);
    audio.title = title;
  },
  setArtist(artist) {
    console.info('setArtist artist=' + artist);
    audio.artist = artist;
  },
  setCover(src) {
    console.info('setCover src=' + src);
    audio.cover = src;
  }
}
Register the audio event listeners in the onShow lifecycle method of the Main (home) page by calling listenAudio from utils.js. Sample code:
<script>
  import utils from '../Util/utils.js';

  module.exports = {
    onShow(options) {
      utils.listenAudio();
    },
  }
</script>
Register the audio event listeners in the onShow lifecycle method of the Audio page as well, again by calling listenAudio from utils.js. The progress callbacks (ondurationchange and ontimeupdate) are set separately here because the playback progress needs to be displayed on the playback page. Sample code:
// Assumes `import utils from '../Util/utils.js';` and `import audio from '#system.audio';`
// at the top of the page script.
onShow(options) {
  var that = this;
  utils.listenAudio();
  audio.ondurationchange = function () {
    console.log('audio ondurationchange');
    that.total = audio.duration;
    console.log('audio ondurationchange total=' + that.total);
  }
  audio.ontimeupdate = function () {
    that.time = audio.currentTime;
    console.log('ontimeupdate time=' + that.time);
  }
},
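For completeness, here is a rough sketch of how the playback page's buttons could delegate to the shared utility above (the method names match utils.js; the data fields and the way the slider event delivers its value are assumptions):
// Audio page script (sketch): button handlers delegate to the shared utils.js.
import utils from '../Util/utils.js';

module.exports = {
  data: {
    time: 0,
    total: 0
  },
  play() {
    utils.startPlay();
  },
  pause() {
    utils.pausePlay();
  },
  stop() {
    utils.stopPlay();
  },
  seek(e) {
    // Assumes the slider's change event exposes the new position as e.progress.
    utils.seekProgress(e.progress);
  }
};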
For more details, please check the following guide:
Quick App Audio Development Guide

Skype web sdk audio service plugin not found

I am integrating the Skype Web SDK audio service into my Ionic application. Everything works fine, but I am not able to make a call. When I click the button to make a call, the following code runs; I hear a beep sound and then get an error about PluginNotInstalled.
var conversation = application.conversationsManager.getConversation('tel:+XXXX');

conversation.selfParticipant.audio.state.when('Connected', function () {
    console.log('Connected to audio call');
});

conversation.state.changed(function (newValue, reason, oldValue) {
    console.log('Conversation state changed from', oldValue, 'to', newValue);
});

conversation.participants.added(function (participant) {
    console.log('Participant:', participant.displayName(), 'has been added to the conversation');
});

conversation.audioService.start().then(function () {
    console.log('The call has been started successfully');
}, function (error) {
    console.log('An error occurred starting the call', error);
});
When I run this code, I get the error "PluginNotInstalled". There is no description of which plugin is required.
An error occurred starting the call
Error: PluginNotInstalled
Exception — skype-web-sdk.js:20782
(anonymous function) — skype-web-sdk.js:35814
exec2 — skype-web-sdk.js:21498
exec — skype-web-sdk.js:21478
dequeue — skype-web-sdk.js:21253
process — skype-web-sdk.js:21274
When I looked into the details, the error is coming from the following code in skype-web-sdk.js:
function init(specs) {
    tm && tm.record(Web.TelemetryEvent.PluginManager, {
        action: 'init',
        state: state()
    });
    if (state() == Media.PluginManager.State.Uninitialized) {
        var id = '__mainPluginManager_' + guid().replace(/-/g, '_');
        Media.log('PluginManager::init - id = ' + id);
        language = (specs && specs.language) || "en-us";
        isRtl = (specs && specs.rtl) || false;
        var PluginObjectCtor_1 = (specs && specs.PluginObject) || Media.PluginObject;
        tm = specs && specs.tm;
        assert(!task || task.state() != 'pending');
        task = new Task('Loading the media plugin.', {
            cancel: function (reason) {
                Media.log('PluginManager::init canceled ' + id);
                stopLoadTimer();
                reset(reason);
                task.reject(reason);
            }
        });
        tm && tm.monitor(task.promise, Web.TelemetryEvent.PluginManager, {
            action: 'initPluginMgr',
            state: state(),
            id: id
        });
        state.set(Media.PluginManager.State.Initializing);
        isPluginInstalled.get().then(function (installed) {
            if (!installed)
                throw Exception('PluginNotInstalled');
            pluginObj = PluginObjectCtor_1({
                id: id,
                managerId: '_'
            });
            pluginObj.event(onPluginObjectEvent, 'async');
            pluginObj.state.changed(onPluginObjectState);
            Media.watch('pluginObject(' + id + ')::state', state);
            Media.log('PluginManager::init - creating inner object');
            try {
                pluginObj.createInnerObject({
                    hide: true,
                    hookEvents: true
                });
            }
            catch (err) {
                state.set(Media.PluginManager.State.Uninitialized);
                if (task.state() == 'pending')
                    task.reject(err);
            }
        }).catch(function (err) {
            state.set(Media.PluginManager.State.Uninitialized);
            if (task.state() == 'pending')
                task.reject(err);
        });
    }
    else {
        // init has already been called and the plugin is either
        // initializing or is already initialized; in either case
        // we will return an existing promise
        assert(task);
    }
    return task.promise;
}
Which browser are you using?
IE11 and Safari both need the Skype for Business Web App plugin, which can be found here: Getting started with Skype Web SDK development
Here you can find more information on how to check if the plugin is installed.
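If you want to handle this more gracefully in the Ionic app, a rough sketch is shown below (the 'PluginNotInstalled' string matches the exception thrown in the SDK code above; the message text and flow are placeholders):
conversation.audioService.start().then(function () {
    console.log('The call has been started successfully');
}, function (error) {
    // The SDK rejects with an exception whose message contains 'PluginNotInstalled'
    // when the Skype for Business Web App plugin is missing (IE11/Safari).
    if (error && String(error).indexOf('PluginNotInstalled') !== -1) {
        alert('Audio calls in this browser require the Skype for Business Web App plugin. ' +
              'Please install it and reload the page.');
    } else {
        console.log('An error occurred starting the call', error);
    }
});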

How to detect whether the gps is enabled after switchToLocationSettings()?

I'm working on an Ionic project. I need to fetch items based on the current district. To get the current district, I tried the code below.
cordova.plugins.diagnostic.isLocationEnabled(function(enabled) {
    alert("Location is " + (enabled ? "enabled" : "disabled"));
    if (!enabled)
        cordova.plugins.diagnostic.switchToLocationSettings();

    var geocoder = new google.maps.Geocoder();
    var district;

    if (navigator.geolocation) {
        navigator.geolocation.getCurrentPosition(successFunction, errorFunction);
    }

    function successFunction(position) {
        var lat = position.coords.latitude;
        var lng = position.coords.longitude;
        codeLatLng(lat, lng);
    }

    function errorFunction() {
        $ionicPopup.alert({
            title: "Alert",
            content: "Geocoder failed"
        });
    }

    function codeLatLng(lat, lng) {
        geocoder.geocode({'latLng': new google.maps.LatLng(lat, lng)}, function(results, status) {
            if (status == google.maps.GeocoderStatus.OK) {
                if (results[1]) {
                    for (var i = 0; i < results[0].address_components.length; i++) {
                        for (var b = 0; b < results[0].address_components[i].types.length; b++) {
                            if (results[0].address_components[i].types[b] == "administrative_area_level_2") {
                                district = results[0].address_components[i];
                            }
                        }
                    }
                    alert(district.long_name);
                } else {
                    alert("No results");
                }
            } else {
                alert("Geocoder Failed");
            }
        });
    }
}, function(error) {
});
This code works fine if GPS is enabled after switching to the location settings. The problem occurs when GPS is not enabled and the user comes back to the app. How can I detect whether it is enabled after moving to the location settings and returning to the app? I have read many posts on Stack Overflow, but I haven't found the solution I actually need, so please help me solve this.
How can I detect whether it is enabled or not after moving to location settings and getting back to the app?
You can detect when your app is returned to the foreground using the resume event.
function onResume() {
    cordova.plugins.diagnostic.isLocationEnabled(function(enabled) {
        console.log("Location is " + (enabled ? "enabled" : "disabled"));
        // etc.
    }, function(error) {});
}

document.addEventListener("resume", onResume, false);

appcelerator titanium android camera native capture photo

I am working on a cross-platform mobile app using Appcelerator Titanium Alloy. The code below opens the native camera and works fine on Apple devices. On Android devices, the camera opens, but the button to actually take a picture is disabled (greyed out). The buttons for taking video or changing camera settings work fine; only the take-picture button does not. Any ideas? Thanks in advance.
The code below is called on click of the takePic button to open the camera:
takePic.addEventListener('click', function(e) {
    var win = Titanium.UI.createWindow({ // Open Camera
    });
    Titanium.Media.showCamera({
        success: function(event) {
            Ti.API.debug('Our type was: ' + event.mediaType);
            if (event.mediaType == Ti.Media.MEDIA_TYPE_PHOTO) {
                win = Titanium.UI.currentWindow;
                if (osName == "android") {
                    win.open();
                }
                try {
                    var image_name = "siteing.jpg";
                    var fileImage = Titanium.Filesystem.getFile(Titanium.Filesystem.applicationDataDirectory, image_name);
                    fileImage.write(event.media);
                    Ti.API.log(event.media);
                    picURL = fileImage; // event.media;
                    picRaw = event.media; // Raw bytes of picture to save to database
                    pictureSet = true;
                    $.videoButton.enabled = false;
                    $.videoButton.backgroundColor = "#DDDDDD";
                    //$.audioButton.enabled = false;
                    format = "Picture";
                    $.savePic.show();
                } catch(e) {
                    alert("An Error:" + e.message);
                }
            } else {
                var alert = createErrorAlert({ text: "An error occurred getting the media. Please check file size and format and try again." });
                $.yesLatLon.add(alert);
                alert.show();
            }
            if (osName == "android") {
                win.open();
            }
        },
        cancel: function() {
            // called when the user cancels taking a picture
            if (osName == "android") {
                //win.close();
            }
        },
        error: function(error) {
            // called when there's an error
            var a = Titanium.UI.createAlertDialog({ title: 'Camera' });
            if (error.code == Titanium.Media.NO_CAMERA) {
                a.setMessage('Please run this test on device');
            } else {
                a.setMessage('Unexpected error: ' + error.code);
            }
            a.show();
        },
        saveToPhotoGallery: true,
        // allowEditing and mediaTypes are iOS-only settings
        allowEditing: true,
        mediaTypes: [Ti.Media.MEDIA_TYPE_VIDEO, Ti.Media.MEDIA_TYPE_PHOTO]
    });
    alert.hide();
    $.yesLatLon.removeEventListener('androidback', arguments.callee);
});
[Screenshot from phone: camera UI with the capture button greyed out]
It's been a long time, but I did get this working and thought I'd post my code in case it helps someone else. Fair warning: I now face a different issue with the camera on certain Android devices (Samsung S-series and Google Pixel cannot open the camera), which I'm posting as a separate question. However, the original issue with the camera button being disabled was resolved last year with the code below (notably the autohide: true option and restricting mediaTypes to photos only).
takePic.addEventListener('click', function(e) {
    var win = Titanium.UI.createWindow({ // Open Camera
    });
    Titanium.Media.showCamera({
        success: function(event) {
            if (event.mediaType == Ti.Media.MEDIA_TYPE_PHOTO) {
                win = Titanium.UI.currentWindow;
                if (osName == "android") {
                    var img_view = Titanium.UI.createImageView({
                        height: '100%',
                        width: '100%',
                    });
                    win.add(img_view);
                }
                try {
                    picURL = event.media;
                    picRaw = event.media;
                    pictureSet = true;
                    $.videoButton.enabled = false;
                    $.videoButton.backgroundColor = "#DDDDDD";
                    $.savePic.show();
                    format = "Picture";
                } catch(e) {
                    alert("An Error:" + e.message);
                }
            } else {
                var alert = createErrorAlert({
                    text: "An error occurred getting the media. Please check file size and format and try again."
                });
                $.yesLatLon.add(alert);
                alert.show();
            }
        },
        cancel: function() {
            // called when the user cancels taking a picture
        },
        error: function(error) {
            // called when there's an error
            var a = Titanium.UI.createAlertDialog({
                title: 'Camera'
            });
            if (error.code == Titanium.Media.NO_CAMERA) {
                a.setMessage('Please run this test on device');
            } else {
                a.setMessage('Unexpected error: ' + error.code);
            }
            a.show();
        },
        saveToPhotoGallery: true,
        allowEditing: false,
        autohide: true, // Important!
        mediaTypes: [Ti.Media.MEDIA_TYPE_PHOTO]
    });
    alert.hide();
});

Video.js player add chromecast button?

I have tried numerous ways of adding a cast button to the video.js player but cannot manage it for the life of me. Can anyone help?
I'm using the hellovideo CMS for videos and need the plugin added, but I have no idea about jQuery etc., so please, if anyone can help?
There is a really nice plugin for this: https://github.com/kim-company/videojs-chromecast
Just follow the setup instructions (adding the js and css to your page).
I tried kim-company/videojs-chromecast. It only works with an older version of videojs (I used 5.4.6) and it's quite buggy. Another I tried was benjipott/video.js-chromecast, which claims to work with newer videojs, but I didn't like it at all. So I gave up on videojs; I always found the native HTML5 video player more reliable and easier to work with (videojs just wraps it anyway). For the Chromecast side, I provide a nearby button that links to chromecast.link, where I wrote a full web Chromecast sender app. Pass the video and poster URLs in the fragment, as in this example:
https://chromecast.link/#content=http://host/some.mp4,poster=http://host/poster.jpg,subtitles=http://host/webvtt.srt
I recently answered this question; you can check it out here for more information: How to implement chromecast support for html5 player
var session = null;

$(document).ready(function() {
    var loadCastInterval = setInterval(function() {
        if (chrome.cast.isAvailable) {
            console.log('Cast has loaded.');
            clearInterval(loadCastInterval);
            initializeCastApi();
        } else {
            console.log('Unavailable');
        }
    }, 1000);
});

function initializeCastApi() {
    var applicationID = chrome.cast.media.DEFAULT_MEDIA_RECEIVER_APP_ID;
    var sessionRequest = new chrome.cast.SessionRequest(applicationID);
    var apiConfig = new chrome.cast.ApiConfig(sessionRequest,
        sessionListener,
        receiverListener);
    chrome.cast.initialize(apiConfig, onInitSuccess, onInitError);
};

function sessionListener(e) {
    session = e;
    console.log('New session');
    if (session.media.length != 0) {
        console.log('Found ' + session.media.length + ' sessions.');
    }
}

function receiverListener(e) {
    if (e === 'available') {
        console.log("Chromecast was found on the network.");
    } else {
        console.log("There are no Chromecasts available.");
    }
}

function onInitSuccess() {
    console.log("Initialization succeeded");
}

function onInitError() {
    console.log("Initialization failed");
}

$('#castme').click(function() {
    launchApp();
});

function launchApp() {
    console.log("Launching the Chromecast App...");
    chrome.cast.requestSession(onRequestSessionSuccess, onLaunchError);
}

function onRequestSessionSuccess(e) {
    console.log("Successfully created session: " + e.sessionId);
    session = e;
    loadMedia();
}

function onLaunchError() {
    console.log("Error connecting to the Chromecast.");
}

function loadMedia() {
    if (!session) {
        console.log("No session.");
        return;
    }
    var videoSrc = document.getElementById("myVideo").src;
    var mediaInfo = new chrome.cast.media.MediaInfo(videoSrc);
    mediaInfo.contentType = 'video/mp4';
    var request = new chrome.cast.media.LoadRequest(mediaInfo);
    request.autoplay = true;
    session.loadMedia(request, onLoadSuccess, onLoadError);
}

function onLoadSuccess() {
    console.log('Successfully loaded video.');
}

function onLoadError() {
    console.log('Failed to load video.');
}

$('#stop').click(function() {
    stopApp();
});

function stopApp() {
    session.stop(onStopAppSuccess, onStopAppError);
}

function onStopAppSuccess() {
    console.log('Successfully stopped app.');
}

function onStopAppError() {
    console.log('Error stopping app.');
}