Get play position of audio buffer in web audio API - web-audio-api

I am making a browser-based DJ app and need an indicator to show how much of an audio track (audio buffer) has played on a waveform. I have a waveform but can't get the current position.

The Web Audio API uses a common time frame to synchronise all running and scheduled sounds, which can be read from AudioContext.currentTime. You can store the timestamp of when you start your sound and measure the elapsed time while the sound is played. By this you get the position in seconds. If you need the frame index position of the AudioBuffer you can multiply the elapsed time by the sampleRate of the AudioBuffer.
<!DOCTYPE HTML>
<html>
<head>
<title>Time Elapsed Demo</title>
<script>
const audioCtx = new AudioContext()
// Fetches and decodes test.wav, starts playback, and continuously reports
// the elapsed play time (seconds) and the corresponding frame index.
async function startSample() {
const output = document.getElementById("output")
const response = await fetch('test.wav')
const sample = await response.arrayBuffer()
const audioBuffer = await audioCtx.decodeAudioData(sample)
const sampleRate = audioBuffer.sampleRate
const sourceNode = audioCtx.createBufferSource()
sourceNode.buffer = audioBuffer
sourceNode.connect(audioCtx.destination)
let ended = false
sourceNode.onended = () => { ended = true }
// Schedule the start slightly in the future so startTime is exact.
const startTime = audioCtx.currentTime + 0.01
sourceNode.start(startTime)
function showTimeElapsed() {
if (!ended) {
// Position in seconds = context clock minus the scheduled start.
// (was an implicit global; declared with const to avoid leaking)
const elapsed = audioCtx.currentTime - startTime
if (elapsed >= 0) {
output.innerHTML =
"Time elapsed: " + elapsed.toFixed(3) + ", " +
"Frame index: " + (sampleRate * elapsed).toFixed(0)
}
requestAnimationFrame(showTimeElapsed)
}
}
showTimeElapsed()
}
</script>
</head>
<body>
<button id="start" onclick="startSample()">Start Sample</button>
<p></p>
<div id="output"></div>
</body>
</html>

Related

Creating a webpage to load audio file and then display spectrogram of the audio file using web audio

I am creating a webpage to load an audio file from my local drive and then display a spectrogram of the audio file. I am trying to do this using web audio API. However, my code does not run and I always get the following error message.
Any suggestion or solution that you could give is highly appreciated.
Thank you!
<!DOCTYPE html>
<html>
<head>
<title>Spectrogram</title>
</head>
<body>
<h1>Spectrogram</h1>
<form>
<label for="file-input">Select an audio file:</label>
<input type="file" id="file-input" accept="audio/*" />
<input type="submit" value="Submit" />
</form>
<canvas id="spectrogram" width="500" height="200"></canvas>
<script>
// Wire up the file picker and the canvas the spectrogram is drawn on.
const fileInput = document.getElementById('file-input');
const spectrogramCanvas = document.getElementById('spectrogram');
// Listen for file selection
fileInput.addEventListener('change', function() {
// Get the selected file
const file = fileInput.files[0];
// Create a FileReader object
const reader = new FileReader();
// Listen for the file to be loaded
reader.addEventListener('load', function() {
// Create an audio context
const context = new AudioContext();
// Create an audio buffer from the audio file
context.decodeAudioData(reader.result, function(buffer) {
// Create a spectrogram
// NOTE(review): createSpectrogram returns the Promise produced by
// OfflineAudioContext.startRendering(), not pixel data, so the value
// handed to drawSpectrogram is not usable as an image -- see below.
const spectrogram = createSpectrogram(buffer, context);
// Draw the spectrogram on the canvas
drawSpectrogram(spectrogram, spectrogramCanvas);
});
});
// Read the file as an array buffer
reader.readAsArrayBuffer(file);
});
// Create a spectrogram from an audio buffer
// NOTE(review): this returns offlineContext.startRendering(), which is a
// Promise resolving to a rendered AudioBuffer. The analyser's frequency
// data is never read (no getByteFrequencyData calls), so no spectrogram
// (width/height/pixel) data is actually produced here -- this is the
// likely cause of the reported error.
function createSpectrogram(buffer, context) {
// Create an offline audio context
const offlineContext = new OfflineAudioContext(buffer.numberOfChannels, buffer.length, buffer.sampleRate);
// Create a source node from the audio buffer
const source = offlineContext.createBufferSource();
source.buffer = buffer;
// Create an analyser node
const analyser = offlineContext.createAnalyser();
// Connect the source and analyser nodes
source.connect(analyser);
analyser.connect(offlineContext.destination);
// Start the source node
source.start(0);
// Render the audio
return offlineContext.startRendering();
}
// Draw a spectrogram on a canvas
// NOTE(review): the value passed in is the Promise returned by
// createSpectrogram, so spectrogram.width / .height / .forEach are all
// undefined here. Presumably the intent was an array of magnitude values;
// verify against createSpectrogram before relying on this code.
function drawSpectrogram(spectrogram, canvas) {
// Get the canvas context
const context = canvas.getContext('2d');
// Get the image data for the spectrogram
const imageData = context.createImageData(spectrogram.width, spectrogram.height);
// Draw the spectrogram on the image data (greyscale: R=G=B=value, opaque)
spectrogram.forEach((value, i) => {
const j = i * 4;
imageData.data[j] = value;
imageData.data[j + 1] = value;
imageData.data[j + 2] = value;
imageData.data[j + 3] = 255;
});
// Draw the image data on the canvas
context.putImageData(imageData, 0, 0);
}
</script>
</body>
</html>

web audio soundcloud crossfade

I'm struggling to get this basic fade-in / fade-out Web Audio code to work with SoundCloud. It appears that the gainNode.gain.linearRampToValueAtTime functions are bypassed (ie, play starts and ends at gain.volume = 1.0.)
Code needs to work with iOS mobile Safari webkit.
CSS included:
<link href='http://fonts.googleapis.com/css?family=Open+Sans:400,600' rel='stylesheet' type='text/css'>
HTML Follows:
<div id="wrapper">
<div id="content">
<div class="post">
<h2>Fading in a Soundcloud Track</h2>
</div>
</div>
</div>
JS Follows:
<script>
// Prefixed-context fallback. NOTE(review): "window.webAudioContext" is not
// a real vendor name, but window.webkitAudioContext below covers Safari.
var audioCtx = new (window.AudioContext ||
window.webAudioContext ||
window.webkitAudioContext)();
var audio = new Audio();
var url = 'https://api.soundcloud.com/tracks/179612876/stream' + '?client_id=<MY_ID>';
audio.src = url;
audio.preload = true;
audio.load();
audio.addEventListener('canplay', function(){
var currTime = audioCtx.currentTime;
var duration = 30; // total play window, seconds
var fadeTime = 6; // fade-in / fade-out length, seconds
var gainNode = audioCtx.createGain();
// NOTE(review): setting .value does not create a schedule point, so the
// ramps below have no start anchor -- this is why the fades are bypassed;
// the answer below replaces this with setValueAtTime.
gainNode.gain.value = 0.0;
var source = audioCtx.createMediaElementSource(audio);
source.connect(gainNode);
gainNode.connect(audioCtx.destination);
// Then fade in
gainNode.gain.linearRampToValueAtTime(0.0, currTime);
gainNode.gain.linearRampToValueAtTime(1.0, currTime + fadeTime);
// Then fade it out.
gainNode.gain.linearRampToValueAtTime(1.0, currTime + duration-fadeTime);
gainNode.gain.linearRampToValueAtTime(0.0, currTime + duration);
//source.connect(audioCtx.destination);
source.mediaElement.play();
},false);
</script>
"gainNode.gain.value = 0.0;" doesn't actually set a schedule point in the scheduler, it just sets the current value. Since there's no start point in the scheduler, it's jumping up when it hits the first schedule point (i.e. at time currTime + fadeTime). Try this:
// Anchor the gain at 0 so the ramp has an explicit start point in the
// scheduler, then ramp up to full volume over fadeTime seconds.
gainNode.gain.setValueAtTime(0.0, currTime);
gainNode.gain.linearRampToValueAtTime(1.0, currTime + fadeTime);
// Then fade it out.
gainNode.gain.setValueAtTime(1.0, currTime + duration-fadeTime);
gainNode.gain.linearRampToValueAtTime(0.0, currTime + duration);
(and don't bother setting gain.gain.value).

Streaming through SoundManager2 for webradios is pending for a very long time

I'm developing a little JavaScript snippet that allows a user to listen to a web radio stream using soundManager2. By giving a URL to this snippet, the stream sends me music, so it's cool — it works.
Yes, but...
Query to the URL can be very very long (above 2-3 min!). I would like to know if there's a workaround or option I can use to make this query faster.
Why, when opening my m3u (which just contains the mp3 URL) with Windows Media Player, does loading take only 5-10 seconds max, while accessing the same URL in a browser or with soundManager2 takes 2-3 minutes, sometimes more?
Here is my jsFiddle trying with the OuïFM (French radio). The waiting time, for this radio, is about 115 seconds.
// Live radio stream URL plus a visible elapsed-time counter.
var url = 'http://ice39.infomaniak.ch:8000/ouifm3.mp3';
var time = 0;
// Update the on-page counter every 100 ms (time counts tenths of a second).
var timing = setInterval(function() {
$('#Streaming').html((time / 10) + ' seconds.');
time++;
}, 100);
var start = new Date;
soundManager.onready(function() {
soundManager.createSound({
id:'Radio',
url:url,
autoLoad: true,
autoPlay: true,
multiShot: false,
// NOTE(review): onload fires when the resource is considered fully
// loaded; for an endless live stream that can take minutes -- the
// answer below switches to onplay instead.
onload: function() {
$('#Loading').css('display', 'none');
clearInterval(timing);
}
});
});
Your fiddle was broken because SoundManager2's javascript wasn't loading properly. After I fixed that (and some of your code), I was able to get the audio playing in under a second: http://jsfiddle.net/y8GDp/
// Same radio stream, but the elapsed time is measured from a wall-clock
// Date (robust against interval drift), and the loader is hidden in
// onplay -- which fires as soon as audio starts -- rather than onload,
// which can stall for minutes on an endless live stream.
// (Removed the dead "var time = 0;" counter left over from the question's
// version; seconds() derives the elapsed time from Date instead.)
var url = 'http://ice39.infomaniak.ch:8000/ouifm3.mp3';
var timer = $('#Streaming');
var loading = $('#Loading');
var start = new Date;
// Seconds elapsed since the script started, formatted for display.
var seconds = function(){
var diff = ((new Date).getTime() - start.getTime()) / 1000;
return diff + ' seconds.';
};
var timing = setInterval(function() {
timer.html(seconds());
}, 100);
soundManager.onready(function() {
soundManager.createSound({
id:'Radio',
url:url,
autoPlay: true,
onplay: function() {
clearInterval(timing);
loading.html('Finished loading');
timer.html(seconds());
}
});
});

HTML5 Video - currentTime not setting properly on iPhone

I have a basic HTML5 video set up from which I load one of four videos. The problem I'm having is that when I load the next video, it continues playing from the previous time position. Efforts to set the currentTime property seem to be either short lived or ignored entirely.
I have added listeners to a collection of events and have something like this in each one;
// Log currentTime immediately before and after attempting to reset it,
// to observe whether the browser accepts the seek on this event.
myPlayer.addEventListener("loadeddata", function() {
console.log(" loadeddata: before = " + myPlayer.currentTime);
myPlayer.currentTime = 0.1;
console.log(" loadeddata: after = " + myPlayer.currentTime);
}, false);
Sometimes I see the time change for one event but not persist correctly;
durationchange: before = 19.773332595825195
durationchange: after = 0.10000000149011612
loadedmetadata: before = 0.10000000149011612
loadedmetadata: after = 19.773332595825195
loadeddata: before = 19.773332595825195
loadeddata: after = 0.10000000149011612
canplay: before = 0.10000000149011612
canplay: after = 19.773332595825195
And sometimes it never even seems to set at all;
durationchange: before = 50.66666793823242
durationchange: after = 50.66666793823242
loadedmetadata: before = 50.66666793823242
loadedmetadata: after = 50.66666793823242
loadeddata: before = 50.66666793823242
loadeddata: after = 50.66666793823242
canplay: before = 50.66666793823242
canplay: after = 50.66666793823242
This seems similar to the issue here but there didn't seem to be any resolution. Has anyone encountered this issue on iPhone before?
TL;DR: change currentTime on the loadeddata event. This works for audio too.
It looks like the problem (still appearing for me on Safari 11.1) is that Safari will not allow currentTime to be changed when a video is first loaded IF it hasn't loaded a frame for that currentTime yet. The bigger problem: the wrong solution can break Chrome (and likely other browsers too).
Fortunately, we have a lot of events we can listen for while media is loading:
During the loading process of an audio/video, the following events occur, in this order:
loadstart
durationchange
loadedmetadata
loadeddata
progress
canplay
canplaythrough
-W3Schools (I know it's not a preferred source, but I couldn't find the same info on MDN)
I tried adjusting currentTime on different events, but on the first 3 events, Safari would move the time back to 0; and on 5 and 6 it seemed to prevent the video from playing in Chrome, because it would get stuck at currentTime (which I could've worked around, but I thought there was a better solution).
(I didn't want to have to load the whole file to go to the right spot, because I want to support hefty videos. So canplaythrough wasn't an option for me.)
The description for the loadeddata event reads:
The loadeddata event is fired when the first frame of the media has finished loading.
-MDN
When I changed currentTime on loadeddata, Safari could tell that the frame was loaded and available, and would update correctly. It also wouldn't cause Chrome to freeze in a single spot while playing.
The problem and solution are identical for audio.
From my findings the issue seems to be that on iPhone only (iPad works fine) the currentTime property will not be set correctly until the "canplaythrough" event, however changing the currentTime at that point will cause a noticeable hiccup. The solution for that would be to intentionally pause the video after calling load...
myVideo.load();
myVideo.pause();
...and then call play in the event when the time has reset.
The second problem, however, is when the duration of the new movie is shorter than the currentTime position. In this case not only does currentTime fail to set, but "canplaythrough" is never called, and QuickTime just sits at the end of the video doing nothing.
I discovered the solution to both problems was to force a secondary load if the currentTime was not reset in the event BEFORE "canplaythrough". It's a bit roundabout with the timer callback, but it seems to do the trick;
// Reset the playback position once the video reports it can play; if the
// browser refuses the seek, force a reload, hold playback, and retry
// shortly afterwards.
var myVideo = document.getElementById("video1");
myVideo.addEventListener("canplay", () => {
console.log(" canplay: before = " + myVideo.currentTime);
myVideo.currentTime = 0.1;
console.log(" canplay: after = " + myVideo.currentTime);
if (myVideo.currentTime >= 1) {
// The seek did not stick: reload, pause, and retry in 500 ms.
myVideo.load();
myVideo.pause();
setTimeout(checkStarted, 500);
} else {
myVideo.play();
}
}, false);
// Deferred retry: start playback after the forced reload settles.
function checkStarted() {
console.log(" checkStarted called");
myVideo.play();
}
I had to set the preload attribute to metadata (on the HTML of the main video element) and set the currentTime of the video element within the loadedmetadata event listener.
<video id="myVideo" preload="metadata">
<source src="/path/to/video" type="video/mp4">
</video>
JS
// Seek only once metadata (duration etc.) is available; assumes VideoEl
// and newTime are defined elsewhere in the page.
VideoEl.addEventListener('loadedmetadata', VideoMetaDataLoaded);
function VideoMetaDataLoaded() {
VideoEl.currentTime = newTime;
}
Below is my Angular/Ionic solution to restore the video position. It works on IOS, Android, Chrome and Safari.
HTML:
<video preload="metadata"
poster="{{ resource.thumbnail_file }}"
playsinline webkit-playsinline
#videos>
...
</video>
Typescript:
#ViewChildren('videos') videos: QueryList<any>;
videoCache: Map<number, number> = new Map<number, number>();
// Re-applies the cached playback position to each rendered <video> once
// its first frame is available, then forces a reload so the seek sticks.
private restoreVideoPositions() {
var context = this;
// setTimeout(..., 0) defers until the view children have been rendered.
setTimeout(() => {
this.videos.forEach(function (item, idx) {
var video = item.nativeElement;
// NOTE(review): videoCache is declared Map<number, number>, but a DOM
// element id is a string -- confirm the key type actually matches.
var currentTime = context.videoCache.get(video.id);
if (currentTime != null && currentTime > 0) {
console.log('add listener', currentTime);
video.addEventListener("loadeddata", function () {
// Seek only when enough data is buffered (HAVE_FUTURE_DATA or better).
if (video.readyState >= 3) {
video.currentTime = currentTime;
// video.play();
}
});
video.load();
}
});
}, 0);
}
// Pauses every video and records its current position keyed by element id,
// so restoreVideoPositions can seek back to it later.
private storeVideoPositions() {
var context = this;
this.videoCache.clear()
this.videos.forEach(function (item, idx) {
var video = item.nativeElement;
video.pause();
context.videoCache.set(video.id, video.currentTime)
});
}
I used a combination of the last two answers here, but had to reduce the ready state limit to 2 (HTMLVideoElement.prototype.HAVE_CURRENT_DATA) as the loadeddata event on iOS would often never come in higher than 2.
With help from the other answers here I came up with the next solution:
// Extends every <video> element with a "seek to a time, then play" helper.
// NOTE(review): extending a native prototype is generally discouraged, but
// the usage example below depends on it, so it is kept as-is.
HTMLVideoElement.prototype.playFromTime = function(currentTime){
let that = this;
that.load();
that.pause();
that.currentTime = currentTime;
let loadedMetadata;
// Re-apply the seek once metadata arrives, for browsers (e.g. Safari, per
// the discussion above) that ignore a seek issued before a frame loads.
loadedMetadata = function(event){
that.currentTime = currentTime;
that.removeEventListener("loadedmetadata", loadedMetadata);
}
// Only wait for metadata if the immediate seek did not stick.
if(that.currentTime !== currentTime){
that.addEventListener("loadedmetadata", loadedMetadata);
}
that.play();
}
//usage example:
myVidElement.playFromTime(20);
Which in my situation works on an iPhone on Safari, Safari on the desktop, Firefox on macOS, Chrome on macOS. That's all I have tested for now.
Had the same problem with a play video on scroll with React. The following solved it for me.
// Excerpt of the key iOS fix: call load() once on mount so the element
// accepts later currentTime updates (full component follows).
useEffect(() => {
videoRef.current.load();
}, []);
const { render } = ReactDOM;
const { useState, useEffect, useRef } = React;
// Scrub-on-scroll video: maps window scroll progress to a playback
// position. length = video duration in seconds; height = page height in px.
const Video = ({ src, height, length }) => {
const [currentTime, setCurrentTime] = useState(0);
const videoRef = useRef(null);
useEffect(() => {
// This is needed for IOS
videoRef.current.load();
window.addEventListener("scroll", handleScroll);
return () => window.removeEventListener("scroll", handleScroll);
}, []);
// Scroll progress (0..1) scaled to the video duration.
const getCurrentTime = () => {
const percentScrolled = window.scrollY / (height - window.innerHeight);
return length * percentScrolled;
};
// On every scroll, seek the video and mirror the time into state
// so the readout re-renders.
const handleScroll = (e) => {
const time = getCurrentTime();
videoRef.current.currentTime = time;
setCurrentTime(time);
};
return (
<div style={{ height }}>
<p>Time: {currentTime.toFixed(2)}s</p>
<video ref={videoRef} muted playsInline>
<source src={src} type="video/mp4" />
Your browser does not support the video tag.
</video>
</div>
);
};
const App = () => {
return (
<Video
length={5}
height={3000}
src="https://lqez.github.io/js/airpodsvf/video.mp4"
/>
);
};
render(<App />, document.getElementById("root"));
/* Page backdrop for the scroll demo. */
body {
background: black;
}
/* Time readout pinned to the top of the viewport. */
p {
color: slategrey;
top: 0;
position: fixed;
z-index: 1;
padding: 20px;
}
/* Video stays fixed below the readout while the page scrolls. */
video {
top: 60px;
position: fixed;
width: 100%;
display: block;
}
<script src="https://cdnjs.cloudflare.com/ajax/libs/react/16.9.0/umd/react.production.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/react-dom/16.9.0/umd/react-dom.production.min.js"></script>
<div id="root"></div>
This is a problem with Chrome on local assets. It happened to me: when I was serving the video from local assets and setting the time, it reset the video to 0.
So the solution that worked for me was serving the video from an S3 bucket.

Connecting a simple gain node to Web Audio API file buffers

I can't seem to understand why this isn't working. I have no errors. I have done this using oscillators and it works fine. FYI this is a continuation of this thread:
Using Web Audio API to assign two sounds to two Divs and play each independently via a click event with javascript
Code:
<div id="divElement"></div>
<style>
/* Clickable orange box that triggers audio playback. */
#divElement{
background-color: orange;
width:70px;
height:100px;
left:143px;
}
</style>
<script>
// Legacy prefixed Web Audio setup (webkitAudioContext / createGainNode /
// noteOn are pre-standard names).
var context = new webkitAudioContext(),
savedBuffer;
// Plays the previously decoded buffer through a gain node.
var playAudioFile = function () {
var source = context.createBufferSource();
var gainNode = context.createGainNode();
source.buffer = savedBuffer;
// START OF PROBLEM AREA: Gain not working but file still plays with no errors.
source.connect(gainNode);
gainNode.connect(context.destination);
// NOTE(review): gainNode.gain is an AudioGain object, so this assignment
// overwrites the property instead of setting the volume -- the fix (per
// the answer below) is gainNode.gain.value = 0.
gainNode.gain = 0;
// END OF PROBLEM AREA
source.noteOn(0); // Play sound immediately
};
// Fetch and decode the mp3, then arm the click handler once it is ready.
var request = new XMLHttpRequest();
request.open('get', 'audio/A.mp3', true);
request.responseType = 'arraybuffer';
request.onload = function () {
context.decodeAudioData(request.response,
function(incomingBuffer) {
savedBuffer = incomingBuffer;
var divElement = document.getElementById("divElement");
divElement.addEventListener("click", playAudioFile , false);
}
);
};
request.send();
</script>
Try gainNode.gain.value = 0 instead. gainNode.gain is an AudioGain object, which has the attribute value.
http://www.w3.org/TR/webaudio/#AudioGainNode