realtime audio on ze web

                APIs, Demos, Applications
                Jan Krutisch <jan.krutisch@mindmatters.de>
                for #hhjs, 21 February 2011




Hi.




Realtime audio?




In/Out




Status Quo.




nada.




flash.




horizon.




Audio Data API




<audio/>




10 beta




Web Audio API




AudioContext




That's it.




Yeah, sorry.




Code, or it didn't happen




<!DOCTYPE html>
<html>
  <head>
    <title>JavaScript Audio Write Example</title>
  </head>
  <body>
    <input type="text" size="4" id="freq" value="440"><label for="freq">Hz</label>
    <button onclick="start()">play</button>
    <button onclick="stop()">stop</button>

    <script type="text/javascript">
      function AudioDataDestination(sampleRate, readFn) {
        // Initialize the audio output.
        var audio = new Audio();
        audio.mozSetup(1, sampleRate);

        var currentWritePosition = 0;
        var prebufferSize = sampleRate / 2; // buffer 500ms
        var tail = null;

        // The function called at a regular interval to populate
        // the audio output buffer.
        setInterval(function() {
          var written;
          // Check if some data was not written in previous attempts.
          if (tail) {
            written = audio.mozWriteAudio(tail);
            currentWritePosition += written;
            if (written < tail.length) {
              // Not all the data was written, saving the tail...
              tail = tail.slice(written);
              return; // ... and exit the function.
            }
            tail = null;
          }

          // Check if we need to add some data to the audio output.
          var currentPosition = audio.mozCurrentSampleOffset();
          var available = currentPosition + prebufferSize - currentWritePosition;
          if (available > 0) {
            // Request some sound data from the callback function.
            var soundData = new Float32Array(available);
            readFn(soundData);

            // Writing the data.
            written = audio.mozWriteAudio(soundData);
            if (written < soundData.length) {
              // Not all the data was written, saving the tail.
              tail = soundData.slice(written);
            }
            currentWritePosition += written;
          }
        }, 100);
      }

      // Control and generate the sound.

      var frequency = 0, currentSoundSample;
      var sampleRate = 44100;

      function requestSoundData(soundData) {
        if (!frequency) {
          return; // no sound selected
        }

        var k = 2 * Math.PI * frequency / sampleRate;
        for (var i = 0, size = soundData.length; i < size; i++) {
          soundData[i] = Math.sin(k * currentSoundSample++);
        }
      }

      var audioDestination = new AudioDataDestination(sampleRate, requestSoundData);

      function start() {
        currentSoundSample = 0;
        frequency = parseFloat(document.getElementById("freq").value);
      }

      function stop() {
        frequency = 0;
      }
    </script>
  </body>
</html>




[...]

var audio = new Audio();
audio.mozSetup(1, sampleRate);

[...]

written = audio.mozWriteAudio(tail);

[...]
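
These two calls are essentially the whole write side of the API: mozSetup() configures the element for raw output (channel count and sample rate), and mozWriteAudio() pushes a buffer of samples and returns how many of them were actually accepted. A minimal sketch of that contract (buffer size and contents made up here):

var audio = new Audio();
audio.mozSetup(1, 44100);              // 1 channel, 44.1 kHz

var samples = new Float32Array(1024);  // fill with sample values in -1..1
var written = audio.mozWriteAudio(samples);

// 'written' can be smaller than samples.length when the internal buffer
// is full; that is why the full example above keeps a tail.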




https://wiki.mozilla.org/Audio_Data_API#Writing_Audio
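
The same wiki page also covers the reading side: an <audio> element fires a MozAudioAvailable event with the raw samples that are about to be played. A minimal sketch (the element id and file name are made up here):

var audio = document.getElementById("player");  // <audio id="player" src="tune.ogg">
var channels, rate, frameBufferLength;

// Channel count, sample rate and frame buffer size become available
// once the element has parsed the file headers.
audio.addEventListener("loadedmetadata", function() {
  channels = audio.mozChannels;
  rate = audio.mozSampleRate;
  frameBufferLength = audio.mozFrameBufferLength;
}, false);

// Fired repeatedly during playback with a Float32Array of interleaved
// samples plus the corresponding playback time in seconds.
audio.addEventListener("MozAudioAvailable", function(event) {
  var samples = event.frameBuffer;
  var time = event.time;
  // ...analyze or visualize the samples here...
}, false);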




context = new webkitAudioContext();

// A JavaScriptNode hands audio processing to a JS callback:
// 8192 sample frames per callback, 0 inputs, 2 output channels.
var jsNode = context.createJavaScriptNode(8192, 0, 2);

jsNode.connect(context.destination);

jsNode.onaudioprocess = requestSoundData;
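
Unlike the Audio Data API, the onaudioprocess callback receives an event and fills that event's output buffer, so requestSoundData needs a slightly different shape here. A sketch reusing the frequency variable from the earlier example (the phase counter is new):

var phase = 0;

function requestSoundData(event) {
  var left = event.outputBuffer.getChannelData(0);
  var right = event.outputBuffer.getChannelData(1);
  var k = 2 * Math.PI * frequency / context.sampleRate;
  for (var i = 0; i < left.length; i++) {
    left[i] = right[i] = frequency ? Math.sin(k * phase++) : 0;
  }
}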




Applications




Music




Games




(Build and they will come)


My little corner




http://webloop.pixelpoke.de




http://github.com/halfbyte/soundbridge.js




soundbridge = SoundBridge(2, 44100, '..');

[...]
  soundbridge.setCallback(calc);
  soundbridge.play();
}, 1000);
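
soundbridge.js defines the actual callback contract; assuming it follows the same fill-a-buffer pattern as the Audio Data API example above, a simple sine-wave calc could look roughly like this (the signature is an assumption, not taken from the library):

// Assumed shape: the bridge hands us a Float32Array of interleaved
// stereo samples to fill (an assumption; check the library for the
// real contract).
var n = 0;
function calc(buffer) {
  var k = 2 * Math.PI * 440 / 44100;
  for (var i = 0; i < buffer.length; i += 2) {
    buffer[i] = buffer[i + 1] = Math.sin(k * n++);
  }
}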





realtime audio on ze web @ hhjs